def get_submission(cls, task=None, participation=None, **kwargs):
    """Build (but do not persist) a Submission object.

    If task or participation are not given, fresh ones are created;
    the two must belong to the same contest.  Any extra keyword
    arguments override the defaults handed to the Submission
    constructor.
    """
    if task is None:
        task = cls.get_task(contest=cls.get_contest())
    if participation is None:
        participation = cls.get_participation(contest=task.contest)
    assert task.contest == participation.contest
    # unique_long_id() provides a distinct offset so generated
    # submissions get distinct timestamps after the contest start.
    defaults = {
        "task": task,
        "participation": participation,
        "timestamp": task.contest.start + timedelta(0, unique_long_id()),
    }
    defaults.update(kwargs)
    return Submission(**defaults)
def add_submission(self, task=None, participation=None, **kwargs):
    """Create a Submission, attach it to the session, and return it.

    Missing task/participation are created on the fly; both must
    share the same contest.  Keyword arguments override the defaults
    passed to the Submission constructor.
    """
    if task is None:
        task = self.add_task()
    if participation is None:
        participation = self.add_participation(contest=task.contest)
    assert task.contest == participation.contest
    # unique_long_id() gives each generated submission its own
    # timestamp offset from the contest start.
    defaults = {
        "task": task,
        "participation": participation,
        "timestamp": task.contest.start + timedelta(0, unique_long_id()),
    }
    defaults.update(kwargs)
    submission = Submission(**defaults)
    self.session.add(submission)
    return submission
def add_submission(contest_id, username, task_name, timestamp, files):
    """Create a submission in the database.

    contest_id (int): id of the contest the submission belongs to.
    username (str): name of the user who is submitting.
    task_name (str): name of the task on which to submit.
    timestamp (int): Unix timestamp of the submission.
    files ({str: str}): map from element of the submission format to
        the path of the file to submit for it.

    return (bool): True if the submission was stored, False on any
        validation or storage failure (details are logged).

    """
    file_cacher = FileCacher()
    with SessionGen() as session:
        participation = session.query(Participation)\
            .join(Participation.user)\
            .filter(Participation.contest_id == contest_id)\
            .filter(User.username == username)\
            .first()
        if participation is None:
            # Use the module-level logger consistently (the original
            # mixed the root `logging` module with `logger`).
            logger.critical("User `%s' does not exists or "
                            "does not participate in the contest.",
                            username)
            return False
        task = session.query(Task)\
            .filter(Task.contest_id == contest_id)\
            .filter(Task.name == task_name)\
            .first()
        if task is None:
            logger.critical("Unable to find task `%s'.", task_name)
            return False

        # A set gives O(1) membership tests below; the loop variable is
        # renamed so it no longer shadows the builtin `format`.
        elements = set(format_element.filename
                       for format_element in task.submission_format)

        for file_ in files:
            if file_ not in elements:
                logger.critical("File `%s' is not in the submission format "
                                "for the task.", file_)
                return False

        if any(element not in files for element in elements):
            logger.warning("Not all files from the submission format were "
                           "provided.")

        # files and elements now coincide. We compute the language for
        # each file and check that they do not mix.
        language = None
        for file_ in files:
            this_language = filename_to_language(files[file_])
            if this_language is None and "%l" in file_:
                logger.critical("Cannot recognize language for file `%s'.",
                                file_)
                return False

            if language is None:
                language = this_language
            elif this_language is not None and language != this_language:
                logger.critical("Mixed-language submission detected.")
                return False

        # Store all files from the arguments, and obtain their digests.
        file_digests = {}
        try:
            for file_ in files:
                digest = file_cacher.put_file_from_path(
                    files[file_],
                    "Submission file %s sent by %s at %d."
                    % (file_, username, timestamp))
                file_digests[file_] = digest
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt and
            # SystemExit still propagate; the traceback is logged.
            logger.critical("Error while storing submission's file.",
                            exc_info=True)
            return False

        # Create objects in the DB.
        submission = Submission(make_datetime(timestamp), language,
                                participation=participation, task=task)
        for filename, digest in file_digests.items():
            session.add(File(filename, digest, submission=submission))
        session.add(submission)
        session.commit()

        return True
def accept_submission(sql_session, file_cacher, participation, task,
                      timestamp, tornado_files, language_name, official):
    """Process a contestant's request to submit a submission.

    Parse and validate the data that a contestant sent for a
    submission and, if all checks and operations succeed, add the
    result to the database and return it.

    sql_session (Session): the DB session to use to fetch and add data.
    file_cacher (FileCacher): the file cacher to use to store the files.
    participation (Participation): the contestant who is submitting.
    task (Task): the task on which they are submitting.
    timestamp (datetime): the moment in time they submitted at.
    tornado_files ({str: [tornado.httputil.HTTPFile]}): the files they
        sent in.
    language_name (str|None): the language they declared their files are
        in (None means unknown and thus auto-detect).
    official (bool): whether the submission was sent in during a regular
        contest phase (and should be counted towards the score/rank) or
        during the analysis mode.

    return (Submission): the resulting submission, if all went well.

    raise (UnacceptableSubmission): if the contestant wasn't allowed to
        hand in a submission, if the provided data was invalid, if there
        were critical failures in the process.

    """
    contest = participation.contest
    assert task.contest is contest

    # Check whether the contestant is allowed to submit: per-contest and
    # per-task caps on count and on frequency, each with its own message.
    if not check_max_number(sql_session, contest.max_submission_number,
                            participation, contest=contest):
        raise UnacceptableSubmission(
            N_("Too many submissions!"),
            N_("You have reached the maximum limit of "
               "at most %d submissions among all tasks.")
            % contest.max_submission_number)

    if not check_max_number(
            sql_session, task.max_submission_number, participation,
            task=task):
        raise UnacceptableSubmission(
            N_("Too many submissions!"),
            N_("You have reached the maximum limit of "
               "at most %d submissions on this task.")
            % task.max_submission_number)

    if not check_min_interval(sql_session, contest.min_submission_interval,
                              timestamp, participation, contest=contest):
        raise UnacceptableSubmission(
            N_("Submissions too frequent!"),
            N_("Among all tasks, you can submit again "
               "after %d seconds from last submission.")
            % contest.min_submission_interval.total_seconds())

    if not check_min_interval(sql_session, task.min_submission_interval,
                              timestamp, participation, task=task):
        raise UnacceptableSubmission(
            N_("Submissions too frequent!"),
            N_("For this task, you can submit again "
               "after %d seconds from last submission.")
            % task.min_submission_interval.total_seconds())

    # Process the data we received and ensure it's valid.
    required_codenames = set(task.submission_format)

    try:
        received_files = extract_files_from_tornado(tornado_files)
    except InvalidArchive:
        raise UnacceptableSubmission(
            N_("Invalid archive format!"),
            N_("The submitted archive could not be opened."))

    try:
        files, language = match_files_and_language(
            received_files, language_name, required_codenames,
            contest.languages)
    except InvalidFilesOrLanguage:
        raise UnacceptableSubmission(
            N_("Invalid submission format!"),
            N_("Please select the correct files."))

    # `digests` may be pre-populated below with digests recovered from the
    # contestant's previous submission (partial-submission task types);
    # the freshly stored files are merged into it afterwards.
    digests = dict()
    missing_codenames = required_codenames.difference(iterkeys(files))
    if len(missing_codenames) > 0:
        if task.active_dataset.task_type_object.ALLOW_PARTIAL_SUBMISSION:
            digests = fetch_file_digests_from_previous_submission(
                sql_session, participation, task, language,
                missing_codenames)
        else:
            raise UnacceptableSubmission(
                N_("Invalid submission format!"),
                N_("Please select the correct files."))

    if any(len(content) > config.max_submission_length
           for content in itervalues(files)):
        raise UnacceptableSubmission(
            N_("Submission too big!"),
            N_("Each source file must be at most %d bytes long.")
            % config.max_submission_length)

    # All checks done, submission accepted.

    # Best-effort local backup: a failure here is logged but does not
    # reject the submission.
    if config.submit_local_copy:
        try:
            store_local_copy(config.submit_local_copy_path, participation,
                             task, timestamp, files)
        except StorageFailed:
            logger.error("Submission local copy failed.", exc_info=True)

    # We now have to send all the files to the destination...
    try:
        for codename, content in iteritems(files):
            digest = file_cacher.put_file_content(
                content,
                "Submission file %s sent by %s at %d." % (
                    codename, participation.user.username,
                    make_timestamp(timestamp)))
            digests[codename] = digest
    # In case of error, the server aborts the submission
    except Exception as error:
        logger.error("Storage failed! %s", error)
        raise UnacceptableSubmission(
            N_("Submission storage failed!"),
            N_("Please try again."))

    # All the files are stored, ready to submit!
    logger.info("All files stored for submission sent by %s",
                participation.user.username)

    # NOTE(review): the caller is expected to commit the session; this
    # function only adds the objects — confirm against call sites.
    submission = Submission(
        timestamp=timestamp,
        language=language.name if language is not None else None,
        task=task,
        participation=participation,
        official=official)
    sql_session.add(submission)

    for codename, digest in iteritems(digests):
        sql_session.add(File(
            filename=codename, digest=digest, submission=submission))

    return submission
def add_submissions(contest_name, task_name, username, items):
    """
    Add submissions from the given user to the given task in the
    given contest. Each item corresponds to a submission, and should
    contain a dictionary which maps formatted file names to paths.
    For example, in batch tasks the format is "Task.%l", so one
    submission would be {"Task.%l": "path/to/task.cpp"}.
    """
    # We connect to evaluation service to try and notify it about
    # the new submissions. Otherwise, it will pick it up only on
    # the next sweep for missed operations.
    rs = RemoteServiceClient(ServiceCoord("EvaluationService", 0))
    rs.connect()

    with SessionGen() as session:
        user = get_user(session, username)
        contest = get_contest(session, contest_name)
        participation = get_participation(session, contest, user)
        task = get_task(session, task_name, contest)
        elements = set(format_element.filename
                       for format_element in task.submission_format)
        file_cacher = FileCacher()

        # We go over all submissions twice. First we validate the
        # submission format, so that no partial batch is committed if
        # any item is malformed.
        for submission_dict in items:
            for (format_file_name, path) in submission_dict.iteritems():
                if format_file_name not in elements:
                    raise Exception("Unexpected submission file: %s. "
                                    "Expected elements: %s" % (
                                        format_file_name, elements))
                if not os.path.isfile(path):
                    raise Exception("File not found: %s" % path)

        # Now add to database.
        for submission_dict in items:
            if not submission_dict:
                continue
            timestamp = time.time()
            file_digests = {}
            # The language is inferred from the last recognizable file
            # extension; files are not checked for mixed languages here.
            language_name = None
            for (format_file_name, path) in submission_dict.iteritems():
                digest = file_cacher.put_file_from_path(
                    path,
                    "Submission file %s sent by %s at %d."
                    % (path, username, timestamp))
                file_digests[format_file_name] = digest

                current_language = filename_to_language(path)
                if current_language is not None:
                    language_name = current_language.name

            # Commit per submission so each gets a stable id before the
            # evaluation service is notified about it.
            submission = Submission(make_datetime(timestamp), language_name,
                                    participation=participation, task=task)
            for filename, digest in file_digests.items():
                session.add(File(filename, digest, submission=submission))
            session.add(submission)
            session.commit()
            rs.new_submission(submission_id=submission.id)

    # NOTE(review): not in a finally block — the RPC connection leaks if
    # an exception escapes the loop above; confirm whether that matters
    # for this one-shot tool.
    rs.disconnect()
def post(self, task_name):
    """Handle a contestant's submission (POST) for the given task.

    Validates submission count/frequency limits, unpacks an optional
    archive, checks the submission format and language, stores the
    files, creates the Submission in the DB and notifies the
    evaluation service.  On any validation failure a notification is
    queued and the client is redirected back to the task page.
    """
    participation = self.current_user

    try:
        task = self.contest.get_task(task_name)
    except KeyError:
        raise tornado.web.HTTPError(404)

    # Alias for easy access
    contest = self.contest

    # Enforce maximum number of submissions
    try:
        if contest.max_submission_number is not None:
            submission_c = self.sql_session\
                .query(func.count(Submission.id))\
                .join(Submission.task)\
                .filter(Task.contest == contest)\
                .filter(Submission.participation == participation)\
                .scalar()
            # Unrestricted users (e.g. testers) bypass the limits.
            if submission_c >= contest.max_submission_number and \
                    not self.current_user.unrestricted:
                raise ValueError(
                    self._("You have reached the maximum limit of "
                           "at most %d submissions among all tasks.") %
                    contest.max_submission_number)

        if task.max_submission_number is not None:
            submission_t = self.sql_session\
                .query(func.count(Submission.id))\
                .filter(Submission.task == task)\
                .filter(Submission.participation == participation)\
                .scalar()
            if submission_t >= task.max_submission_number and \
                    not self.current_user.unrestricted:
                raise ValueError(
                    self._("You have reached the maximum limit of "
                           "at most %d submissions on this task.") %
                    task.max_submission_number)
    except ValueError as error:
        # error.message is Python-2-only; this handler predates py3.
        self.application.service.add_notification(
            participation.user.username,
            self.timestamp,
            self._("Too many submissions!"),
            error.message,
            NOTIFICATION_ERROR)
        self.redirect("/tasks/%s/submissions" % quote(task.name, safe=''))
        return

    # Enforce minimum time between submissions
    try:
        if contest.min_submission_interval is not None:
            last_submission_c = self.sql_session.query(Submission)\
                .join(Submission.task)\
                .filter(Task.contest == contest)\
                .filter(Submission.participation == participation)\
                .order_by(Submission.timestamp.desc())\
                .first()
            if last_submission_c is not None and \
                    self.timestamp - last_submission_c.timestamp < \
                    contest.min_submission_interval and \
                    not self.current_user.unrestricted:
                raise ValueError(
                    self._("Among all tasks, you can submit again "
                           "after %d seconds from last submission.") %
                    contest.min_submission_interval.total_seconds())

        # We get the last submission even if we may not need it
        # for min_submission_interval because we may need it later,
        # in case this is a ALLOW_PARTIAL_SUBMISSION task.
        last_submission_t = self.sql_session.query(Submission)\
            .filter(Submission.task == task)\
            .filter(Submission.participation == participation)\
            .order_by(Submission.timestamp.desc())\
            .first()

        if task.min_submission_interval is not None:
            if last_submission_t is not None and \
                    self.timestamp - last_submission_t.timestamp < \
                    task.min_submission_interval and \
                    not self.current_user.unrestricted:
                raise ValueError(
                    self._("For this task, you can submit again "
                           "after %d seconds from last submission.") %
                    task.min_submission_interval.total_seconds())
    except ValueError as error:
        self.application.service.add_notification(
            participation.user.username,
            self.timestamp,
            self._("Submissions too frequent!"),
            error.message,
            NOTIFICATION_ERROR)
        self.redirect("/tasks/%s/submissions" % quote(task.name, safe=''))
        return

    # Ensure that the user did not submit multiple files with the
    # same name.
    if any(len(filename) != 1 for filename in self.request.files.values()):
        self.application.service.add_notification(
            participation.user.username,
            self.timestamp,
            self._("Invalid submission format!"),
            self._("Please select the correct files."),
            NOTIFICATION_ERROR)
        self.redirect("/tasks/%s/submissions" % quote(task.name, safe=''))
        return

    # If the user submitted an archive, extract it and use content
    # as request.files.
    if len(self.request.files) == 1 and \
            self.request.files.keys()[0] == "submission":
        archive_data = self.request.files["submission"][0]
        del self.request.files["submission"]

        # Create the archive.
        archive = Archive.from_raw_data(archive_data["body"])

        if archive is None:
            self.application.service.add_notification(
                participation.user.username,
                self.timestamp,
                self._("Invalid archive format!"),
                self._("The submitted archive could not be opened."),
                NOTIFICATION_ERROR)
            self.redirect("/tasks/%s/submissions" %
                          quote(task.name, safe=''))
            return

        # Extract the archive.
        unpacked_dir = archive.unpack()
        for name in archive.namelist():
            filename = os.path.basename(name)
            # NOTE(review): the file handle from open() is never closed
            # (relies on refcounting); a `with` block would be safer.
            body = open(os.path.join(unpacked_dir, filename), "r").read()
            self.request.files[filename] = [{
                'filename': filename,
                'body': body
            }]

        archive.cleanup()

    # This ensure that the user sent one file for every name in
    # submission format and no more. Less is acceptable if task
    # type says so.
    task_type = get_task_type(dataset=task.active_dataset)
    required = set([sfe.filename for sfe in task.submission_format])
    provided = set(self.request.files.keys())
    if not (required == provided or (task_type.ALLOW_PARTIAL_SUBMISSION
                                     and required.issuperset(provided))):
        self.application.service.add_notification(
            participation.user.username,
            self.timestamp,
            self._("Invalid submission format!"),
            self._("Please select the correct files."),
            NOTIFICATION_ERROR)
        self.redirect("/tasks/%s/submissions" % quote(task.name, safe=''))
        return

    # Add submitted files. After this, files is a dictionary indexed
    # by *our* filenames (something like "output01.txt" or
    # "taskname.%l", and whose value is a couple
    # (user_assigned_filename, content).
    files = {}
    for uploaded, data in self.request.files.iteritems():
        files[uploaded] = (data[0]["filename"], data[0]["body"])

    # If we allow partial submissions, implicitly we recover the
    # non-submitted files from the previous submission. And put them
    # in file_digests (i.e. like they have already been sent to FS).
    submission_lang = None
    file_digests = {}
    if task_type.ALLOW_PARTIAL_SUBMISSION and \
            last_submission_t is not None:
        for filename in required.difference(provided):
            if filename in last_submission_t.files:
                # If we retrieve a language-dependent file from
                # last submission, we take note that language must
                # be the same.
                if "%l" in filename:
                    submission_lang = last_submission_t.language
                file_digests[filename] = \
                    last_submission_t.files[filename].digest

    # We need to ensure that everytime we have a .%l in our
    # filenames, the user has the extension of an allowed
    # language, and that all these are the same (i.e., no
    # mixed-language submissions).
    error = None
    for our_filename in files:
        user_filename = files[our_filename][0]
        if our_filename.find(".%l") != -1:
            lang = filename_to_language(user_filename)
            if lang is None:
                error = self._("Cannot recognize submission's language.")
                break
            elif submission_lang is not None and \
                    submission_lang != lang:
                error = self._("All sources must be in the same language.")
                break
            elif lang not in contest.languages:
                # NOTE(review): the %-interpolation happens *before*
                # self._() here, so the formatted string is unlikely to
                # match a translation catalog entry — confirm intended.
                error = self._("Language %s not allowed in this contest."
                               % lang)
                break
            else:
                submission_lang = lang
    if error is not None:
        self.application.service.add_notification(
            participation.user.username,
            self.timestamp,
            self._("Invalid submission!"),
            error,
            NOTIFICATION_ERROR)
        self.redirect("/tasks/%s/submissions" % quote(task.name, safe=''))
        return

    # Check if submitted files are small enough.
    if any([len(f[1]) > config.max_submission_length
            for f in files.values()]):
        self.application.service.add_notification(
            participation.user.username,
            self.timestamp,
            self._("Submission too big!"),
            self._("Each source file must be at most %d bytes long.") %
            config.max_submission_length,
            NOTIFICATION_ERROR)
        self.redirect("/tasks/%s/submissions" % quote(task.name, safe=''))
        return

    # All checks done, submission accepted.

    # Attempt to store the submission locally to be able to
    # recover a failure.
    if config.submit_local_copy:
        try:
            path = os.path.join(
                config.submit_local_copy_path.replace("%s",
                                                      config.data_dir),
                participation.user.username)
            if not os.path.exists(path):
                os.makedirs(path)
            # Pickle in ASCII format produces str, not unicode,
            # therefore we open the file in binary mode.
            with io.open(
                    os.path.join(path,
                                 "%d" % make_timestamp(self.timestamp)),
                    "wb") as file_:
                pickle.dump((self.contest.id,
                             participation.user.id,
                             task.id,
                             files), file_)
        except Exception as error:
            # Best-effort backup only: failure is logged, not fatal.
            logger.warning("Submission local copy failed.", exc_info=True)

    # We now have to send all the files to the destination...
    try:
        for filename in files:
            digest = self.application.service.file_cacher.put_file_content(
                files[filename][1],
                "Submission file %s sent by %s at %d." % (
                    filename,
                    participation.user.username,
                    make_timestamp(self.timestamp)))
            file_digests[filename] = digest

    # In case of error, the server aborts the submission
    except Exception as error:
        logger.error("Storage failed! %s", error)
        self.application.service.add_notification(
            participation.user.username,
            self.timestamp,
            self._("Submission storage failed!"),
            self._("Please try again."),
            NOTIFICATION_ERROR)
        self.redirect("/tasks/%s/submissions" % quote(task.name, safe=''))
        return

    # All the files are stored, ready to submit!
    logger.info("All files stored for submission sent by %s",
                participation.user.username)
    submission = Submission(self.timestamp,
                            submission_lang,
                            task=task,
                            participation=participation)

    for filename, digest in file_digests.items():
        self.sql_session.add(File(filename,
                                  digest,
                                  submission=submission))
    self.sql_session.add(submission)
    self.sql_session.commit()
    self.application.service.evaluation_service.new_submission(
        submission_id=submission.id)
    self.application.service.add_notification(
        participation.user.username,
        self.timestamp,
        self._("Submission received"),
        self._("Your submission has been received "
               "and is currently being evaluated."),
        NOTIFICATION_SUCCESS)
    # The argument (encripted submission id) is not used by CWS
    # (nor it discloses information to the user), but it is useful
    # for automatic testing to obtain the submission id).
    # FIXME is it actually used by something?
    self.redirect("/tasks/%s/submissions?%s" % (
        quote(task.name, safe=''),
        encrypt_number(submission.id)))
def submission_handler(self):
    """Dispatch submission-related API actions from local.data.

    Supported actions: 'list' (all of the caller's submissions on a
    task), 'details' (one submission with compilation/score info) and
    'new' (hand in a submission).  Returns an error string on failure;
    on success the response is written into local.resp.

    Request/response state lives in thread-local `local` (data, user,
    participation, session, resp, access_level).
    """
    if local.data['action'] == 'list':
        task = local.session.query(Task)\
            .filter(Task.name == local.data['task_name']).first()
        if task is None:
            return 'Not found'
        if local.user is None:
            return 'Unauthorized'
        subs = local.session.query(Submission)\
            .filter(Submission.participation_id == local.participation.id)\
            .filter(Submission.task_id == task.id)\
            .order_by(desc(Submission.timestamp)).all()
        submissions = []
        for s in subs:
            submission = dict()
            submission['id'] = s.id
            submission['task_id'] = s.task_id
            submission['timestamp'] = make_timestamp(s.timestamp)
            submission['files'] = []
            for name, f in s.files.iteritems():
                fi = dict()
                # Substitute the language extension into "%l" names.
                if s.language is None:
                    fi['name'] = name
                else:
                    fi['name'] = name.replace('%l', s.language)
                fi['digest'] = f.digest
                submission['files'].append(fi)
            result = s.get_result()
            # getattr with default tolerates result being None.
            for i in ['compilation_outcome', 'evaluation_outcome']:
                submission[i] = getattr(result, i, None)
            if result is not None and result.score is not None:
                submission['score'] = round(result.score, 2)
            submissions.append(submission)
        local.resp['submissions'] = submissions
    elif local.data['action'] == 'details':
        s = local.session.query(Submission)\
            .filter(Submission.id == local.data['id']).first()
        if s is None:
            return 'Not found'
        if local.user is None or \
                s.participation_id != local.participation.id:
            return 'Unauthorized'
        submission = dict()
        submission['id'] = s.id
        submission['task_id'] = s.task_id
        submission['timestamp'] = make_timestamp(s.timestamp)
        submission['language'] = s.language
        submission['files'] = []
        for name, f in s.files.iteritems():
            fi = dict()
            if s.language is None:
                fi['name'] = name
            else:
                fi['name'] = name.replace('%l', s.language)
            fi['digest'] = f.digest
            submission['files'].append(fi)
        result = s.get_result()
        for i in ['compilation_outcome', 'evaluation_outcome',
                  'compilation_stdout', 'compilation_stderr',
                  'compilation_time', 'compilation_memory']:
            submission[i] = getattr(result, i, None)
        if result is not None and result.score is not None:
            submission['score'] = round(result.score, 2)
        if result is not None and result.score_details is not None:
            tmp = json.loads(result.score_details)
            # A flat testcase list (entries carrying 'text') is wrapped
            # in a single synthetic subtask with max_score 100.
            if len(tmp) > 0 and 'text' in tmp[0]:
                subt = dict()
                subt['testcases'] = tmp
                subt['score'] = submission['score']
                subt['max_score'] = 100
                submission['score_details'] = [subt]
            else:
                submission['score_details'] = tmp
            # Each testcase text is a (template, args...) JSON list.
            for subtask in submission['score_details']:
                for testcase in subtask['testcases']:
                    data = json.loads(testcase['text'])
                    testcase['text'] = data[0] % tuple(data[1:])
        else:
            submission['score_details'] = None
        local.resp = submission
    elif local.data['action'] == 'new':
        if local.user is None:
            return 'Unauthorized'
        # Hard-coded 20-second rate limit across all tasks.
        lastsub = local.session.query(Submission)\
            .filter(Submission.participation_id == local.participation.id)\
            .order_by(desc(Submission.timestamp)).first()
        if lastsub is not None and \
                make_datetime() - lastsub.timestamp < timedelta(seconds=20):
            return 'Too frequent submissions!'

        try:
            task = local.session.query(Task)\
                .join(SocialTask)\
                .filter(Task.name == local.data['task_name'])\
                .filter(SocialTask.access_level >= local.access_level)\
                .first()
        except KeyError:
            return 'Not found'

        # Files arrive base64-encoded, possibly as a data: URL; strip
        # the prefix and decode in place.
        def decode_file(f):
            f['data'] = f['data'].split(',')[-1]
            f['body'] = b64decode(f['data'])
            del f['data']
            return f

        if len(local.data['files']) == 1 and \
                'submission' in local.data['files']:
            archive_data = decode_file(local.data['files']['submission'])
            del local.data['files']['submission']

            # Create the archive.
            archive = Archive.from_raw_data(archive_data["body"])

            if archive is None:
                return 'Invalid archive!'

            # Extract the archive.
            unpacked_dir = archive.unpack()
            for name in archive.namelist():
                filename = os.path.basename(name)
                # NOTE(review): file handle never closed (refcount
                # reliance); a `with` block would be safer.
                body = open(os.path.join(unpacked_dir, filename),
                            "r").read()
                local.data['files'][filename] = {
                    'filename': filename,
                    'body': body
                }
            files_sent = local.data['files']

            archive.cleanup()
        else:
            files_sent = \
                dict([(k, decode_file(v))
                      for k, v in local.data['files'].iteritems()])

        # TODO: implement partial submissions (?)

        # Detect language
        files = []
        sub_lang = None
        for sfe in task.submission_format:
            f = files_sent.get(sfe.filename)
            if f is None:
                return 'Some files are missing!'
            if len(f['body']) > config.get("core",
                                           "max_submission_length"):
                return 'The files you sent are too big!'
            f['name'] = sfe.filename
            files.append(f)
            if sfe.filename.endswith('.%l'):
                # Map the user's file extension to a language and make
                # sure all sources agree on it.
                language = None
                for ext, l in SOURCE_EXT_TO_LANGUAGE_MAP.iteritems():
                    if f['filename'].endswith(ext):
                        language = l
                if language is None:
                    return 'The language of the files you sent is not ' + \
                        'recognized!'
                elif sub_lang is not None and sub_lang != language:
                    return 'The files you sent are in different languages!'
                else:
                    sub_lang = language

        # Add the submission
        timestamp = make_datetime()
        submission = Submission(timestamp,
                                sub_lang,
                                participation=local.participation,
                                task=task)
        for f in files:
            digest = self.file_cacher.put_file_content(
                f['body'],
                'Submission file %s sent by %s at %d.' % (
                    f['name'], local.user.username,
                    make_timestamp(timestamp)))
            local.session.add(File(f['name'],
                                   digest,
                                   submission=submission))
        local.session.add(submission)
        local.session.commit()

        # Notify ES
        self.evaluation_service.new_submission(
            submission_id=submission.id
        )

        # Answer with submission data
        local.resp['id'] = submission.id
        local.resp['task_id'] = submission.task_id
        local.resp['timestamp'] = make_timestamp(submission.timestamp)
        local.resp['compilation_outcome'] = None
        local.resp['evaluation_outcome'] = None
        local.resp['score'] = None
        local.resp['files'] = []
        for name, f in submission.files.iteritems():
            fi = dict()
            if submission.language is None:
                fi['name'] = name
            else:
                fi['name'] = name.replace('%l', submission.language)
            fi['digest'] = f.digest
            local.resp['files'].append(fi)
    else:
        return 'Bad request'
def add_submission(contest_id, username, task_name, timestamp, files):
    """Create a submission in the database.

    contest_id (int): id of the contest the submission belongs to.
    username (str): name of the user who is submitting.
    task_name (str): name of the task on which to submit.
    timestamp (int): Unix timestamp of the submission.
    files ({str: str}): map from element of the submission format to
        the path of the file to submit for it.

    return (bool): True if the submission was stored, False on any
        validation or storage failure (details are logged).

    """
    file_cacher = FileCacher()
    with SessionGen() as session:
        participation = session.query(Participation)\
            .join(Participation.user)\
            .filter(Participation.contest_id == contest_id)\
            .filter(User.username == username)\
            .first()
        if participation is None:
            # Use the module-level logger consistently (the original
            # mixed the root `logging` module with `logger`).
            logger.critical("User `%s' does not exists or "
                            "does not participate in the contest.",
                            username)
            return False
        task = session.query(Task)\
            .filter(Task.contest_id == contest_id)\
            .filter(Task.name == task_name)\
            .first()
        if task is None:
            logger.critical("Unable to find task `%s'.", task_name)
            return False

        elements = set(task.submission_format)

        for file_ in files:
            if file_ not in elements:
                logger.critical("File `%s' is not in the submission format "
                                "for the task.", file_)
                return False

        if any(element not in files for element in elements):
            logger.warning("Not all files from the submission format were "
                           "provided.")

        # files is now a subset of elements.
        # We ensure we can infer a language if the task requires it.
        language = None
        need_lang = any(element.find(".%l") != -1 for element in elements)
        if need_lang:
            try:
                language = language_from_submitted_files(files)
            except ValueError as e:
                # Lazy %-formatting keeps the logger call uniform.
                logger.critical("%s", e)
                return False
            if language is None:
                # This might happen in case not all files were provided.
                logger.critical("Unable to infer language from submission.")
                return False
        language_name = None if language is None else language.name

        # Store all files from the arguments, and obtain their digests.
        file_digests = {}
        try:
            for file_ in files:
                digest = file_cacher.put_file_from_path(
                    files[file_],
                    "Submission file %s sent by %s at %d."
                    % (file_, username, timestamp))
                file_digests[file_] = digest
        except Exception as e:
            logger.critical("Error while storing submission's file: %s.", e)
            return False

        # Create objects in the DB.
        submission = Submission(make_datetime(timestamp), language_name,
                                participation=participation, task=task)
        for filename, digest in file_digests.items():
            session.add(File(filename, digest, submission=submission))
        session.add(submission)
        session.commit()
        maybe_send_notification(submission.id)

    return True
def post(self, task_name):
    """Handle a submission POST for the task named task_name.

    Validates submission count and frequency limits, the set of
    submitted files (possibly unpacking a single archive), infers or
    validates the programming language, stores the files, creates the
    Submission in the database, notifies the evaluation service and
    redirects the user back to the task's submissions page.

    """
    participation = self.current_user

    try:
        task = self.contest.get_task(task_name)
    except KeyError:
        raise tornado.web.HTTPError(404)

    self.fallback_page = ["tasks", task.name, "submissions"]

    # Alias for easy access
    contest = self.contest

    # Enforce maximum number of submissions
    try:
        if contest.max_submission_number is not None:
            submission_c = self.sql_session\
                .query(func.count(Submission.id))\
                .join(Submission.task)\
                .filter(Task.contest == contest)\
                .filter(Submission.participation == participation)\
                .scalar()
            if submission_c >= contest.max_submission_number and \
                    not self.current_user.unrestricted:
                raise ValueError(
                    self._("You have reached the maximum limit of "
                           "at most %d submissions among all tasks.")
                    % contest.max_submission_number)

        if task.max_submission_number is not None:
            submission_t = self.sql_session\
                .query(func.count(Submission.id))\
                .filter(Submission.task == task)\
                .filter(Submission.participation == participation)\
                .scalar()
            if submission_t >= task.max_submission_number and \
                    not self.current_user.unrestricted:
                raise ValueError(
                    self._("You have reached the maximum limit of "
                           "at most %d submissions on this task.")
                    % task.max_submission_number)
    except ValueError as error:
        self._send_error(self._("Too many submissions!"), str(error))
        return

    # Enforce minimum time between submissions
    try:
        if contest.min_submission_interval is not None:
            last_submission_c = self.sql_session.query(Submission)\
                .join(Submission.task)\
                .filter(Task.contest == contest)\
                .filter(Submission.participation == participation)\
                .order_by(Submission.timestamp.desc())\
                .first()
            if last_submission_c is not None and \
                    self.timestamp - last_submission_c.timestamp < \
                    contest.min_submission_interval and \
                    not self.current_user.unrestricted:
                raise ValueError(
                    self._("Among all tasks, you can submit again "
                           "after %d seconds from last submission.")
                    % contest.min_submission_interval.total_seconds())

        # We get the last submission even if we may not need it
        # for min_submission_interval because we may need it later,
        # in case this is a ALLOW_PARTIAL_SUBMISSION task.
        last_submission_t = self.sql_session.query(Submission)\
            .filter(Submission.task == task)\
            .filter(Submission.participation == participation)\
            .order_by(Submission.timestamp.desc())\
            .first()

        if task.min_submission_interval is not None:
            if last_submission_t is not None and \
                    self.timestamp - last_submission_t.timestamp < \
                    task.min_submission_interval and \
                    not self.current_user.unrestricted:
                raise ValueError(
                    self._("For this task, you can submit again "
                           "after %d seconds from last submission.")
                    % task.min_submission_interval.total_seconds())
    except ValueError as error:
        self._send_error(self._("Submissions too frequent!"), str(error))
        return

    # Required files from the user.
    required = set([sfe.filename for sfe in task.submission_format])

    # Ensure that the user did not submit multiple files with the
    # same name.
    if any(len(filename) != 1
           for filename in itervalues(self.request.files)):
        self._send_error(self._("Invalid submission format!"),
                         self._("Please select the correct files."))
        return

    # If the user submitted an archive, extract it and use content
    # as request.files. But only valid for "output only" (i.e.,
    # not for submissions requiring a programming language
    # identification).
    if len(self.request.files) == 1 and \
            next(iterkeys(self.request.files)) == "submission":
        if any(filename.endswith(".%l") for filename in required):
            self._send_error(self._("Invalid submission format!"),
                             self._("Please select the correct files."),
                             task)
            return
        archive_data = self.request.files["submission"][0]
        del self.request.files["submission"]

        # Create the archive.
        archive = Archive.from_raw_data(archive_data["body"])

        if archive is None:
            self._send_error(
                self._("Invalid archive format!"),
                self._("The submitted archive could not be opened."))
            return

        # Extract the archive.
        unpacked_dir = archive.unpack()
        for name in archive.namelist():
            filename = os.path.basename(name)
            # Fixed: read in binary mode inside a context manager; the
            # original open(..., "r").read() leaked the file handle and
            # read binary submission content in text mode.
            # NOTE(review): joining unpacked_dir with the basename
            # assumes a flat archive; entries inside subdirectories
            # would not be found at this path — confirm intended.
            with open(os.path.join(unpacked_dir, filename), "rb") as f:
                body = f.read()
            self.request.files[filename] = [{
                'filename': filename,
                'body': body
            }]

        archive.cleanup()

    # This ensure that the user sent one file for every name in
    # submission format and no more. Less is acceptable if task
    # type says so.
    task_type = get_task_type(dataset=task.active_dataset)
    provided = set(iterkeys(self.request.files))
    if not (required == provided or (task_type.ALLOW_PARTIAL_SUBMISSION
                                     and required.issuperset(provided))):
        self._send_error(self._("Invalid submission format!"),
                         self._("Please select the correct files."))
        return

    # Add submitted files. After this, files is a dictionary indexed
    # by *our* filenames (something like "output01.txt" or
    # "taskname.%l", and whose value is a couple
    # (user_assigned_filename, content).
    files = {}
    for uploaded, data in iteritems(self.request.files):
        files[uploaded] = (data[0]["filename"], data[0]["body"])

    # Read the submission language provided in the request; we
    # integrate it with the language fetched from the previous
    # submission (if we use it) and later make sure it is
    # recognized and allowed.
    submission_lang = self.get_argument("language", None)
    need_lang = any(
        our_filename.find(".%l") != -1 for our_filename in files)

    # If we allow partial submissions, we implicitly recover the
    # non-submitted files from the previous submission (if it has
    # the same programming language of the current one), and put
    # them in file_digests (since they are already in FS).
    file_digests = {}
    if task_type.ALLOW_PARTIAL_SUBMISSION and \
            last_submission_t is not None and \
            (submission_lang is None or
             submission_lang == last_submission_t.language):
        submission_lang = last_submission_t.language
        for filename in required.difference(provided):
            if filename in last_submission_t.files:
                file_digests[filename] = \
                    last_submission_t.files[filename].digest

    # Throw an error if task needs a language, but we don't have
    # it or it is not allowed / recognized.
    if need_lang:
        error = None
        if submission_lang is None:
            error = self._("Cannot recognize the submission language.")
        elif submission_lang not in contest.languages:
            error = self._("Language %s not allowed in this contest.") \
                % submission_lang
        if error is not None:
            self._send_error(self._("Invalid submission!"), error)
            return

    # Check if submitted files are small enough.
    if any([len(f[1]) > config.max_submission_length
            for f in itervalues(files)]):
        self._send_error(
            self._("Submission too big!"),
            self._("Each source file must be at most %d bytes long.")
            % config.max_submission_length)
        return

    # All checks done, submission accepted.

    # Attempt to store the submission locally to be able to
    # recover a failure.
    if config.submit_local_copy:
        try:
            path = os.path.join(
                config.submit_local_copy_path.replace(
                    "%s", config.data_dir),
                participation.user.username)
            if not os.path.exists(path):
                os.makedirs(path)
            # Pickle in ASCII format produces str, not unicode,
            # therefore we open the file in binary mode.
            with io.open(
                    os.path.join(
                        path, "%d" % make_timestamp(self.timestamp)),
                    "wb") as file_:
                pickle.dump((self.contest.id,
                             participation.user.id,
                             task.id,
                             files), file_)
        except Exception:
            # Best-effort: a failed local copy must not abort the
            # submission itself.
            logger.warning("Submission local copy failed.", exc_info=True)

    # We now have to send all the files to the destination...
    try:
        for filename in files:
            digest = self.service.file_cacher.put_file_content(
                files[filename][1],
                "Submission file %s sent by %s at %d."
                % (filename, participation.user.username,
                   make_timestamp(self.timestamp)))
            file_digests[filename] = digest

    # In case of error, the server aborts the submission
    except Exception as error:
        logger.error("Storage failed! %s", error)
        self._send_error(self._("Submission storage failed!"),
                         self._("Please try again."))
        return

    # All the files are stored, ready to submit!
    logger.info("All files stored for submission sent by %s",
                participation.user.username)

    # Only set the official bit when the user can compete and we are not in
    # analysis mode.
    official = self.r_params["actual_phase"] == 0

    submission = Submission(self.timestamp,
                            submission_lang,
                            task=task,
                            participation=participation,
                            official=official)

    for filename, digest in iteritems(file_digests):
        self.sql_session.add(
            File(filename, digest, submission=submission))
    self.sql_session.add(submission)
    self.sql_session.commit()
    self.service.evaluation_service.new_submission(
        submission_id=submission.id)
    self.service.add_notification(
        participation.user.username,
        self.timestamp,
        self._("Submission received"),
        self._("Your submission has been received "
               "and is currently being evaluated."),
        NOTIFICATION_SUCCESS)
    # The argument (encripted submission id) is not used by CWS
    # (nor it discloses information to the user), but it is useful
    # for automatic testing to obtain the submission id).
    self.redirect(
        self.contest_url(*self.fallback_page,
                         submission_id=encrypt_number(
                             submission.id, config.secret_key)))