def get(self, task_id):
    """Send the statement of the given task as a PDF file.

    task_id (int): id of the task whose statement is requested.

    """
    requested_task = Task.get_from_id(task_id, self.sql_session)
    # Unknown tasks and tasks of other contests are indistinguishable
    # from the outside: both answer with a 404.
    if requested_task is None or requested_task.contest != self.contest:
        raise tornado.web.HTTPError(404)
    # Grab what we need before releasing the DB session.
    statement_digest = requested_task.statement
    task_name = requested_task.name
    self.sql_session.close()
    self.fetch(statement_digest, "application/pdf", "%s.pdf" % task_name)
def dataset_updated(self, task_id):
    """Update RWS with new data about a task.

    It should be called after the live dataset of a task is
    changed: the scores of all submissions of the task are
    re-queued for transmission to the remote ranking services.

    task_id (int): id of the task whose dataset has changed.

    """
    # Short-lived session: we only need the id of the live dataset.
    with SessionGen(commit=False) as session:
        task = Task.get_from_id(task_id, session)
        dataset_id = task.active_dataset_id

    logger.info("Dataset update for task %d (dataset now is %d)." % (
        task_id, dataset_id))

    submission_ids = get_submissions(self.contest_id, task_id=task_id)

    subchanges = []
    # Second session: build one subchange record per submission,
    # reading the result attached to the (now) live dataset.
    with SessionGen(commit=False) as session:
        for submission_id in submission_ids:
            submission = Submission.get_from_id(submission_id, session)
            submission_result = SubmissionResult.get_from_id(
                (submission_id, dataset_id), session)

            if submission_result is None:
                # Not yet compiled, evaluated or scored.
                score = None
                ranking_score_details = None
            else:
                score = submission_result.score
                try:
                    ranking_score_details = json.loads(
                        submission_result.ranking_score_details)
                # NOTE(review): json.decoder.JSONDecodeError exists in
                # simplejson (and py3's json) but not in Python 2's
                # stdlib json — confirm which module `json` is here.
                except (json.decoder.JSONDecodeError, TypeError):
                    # It may be blank.
                    ranking_score_details = None

            # Data to send to remote rankings.  The id embeds the
            # submission timestamp so RWS can order subchanges.
            subchange_id = "%s%ss" % \
                (int(make_timestamp(submission.timestamp)), submission_id)
            subchange_put_data = {
                "submission": encode_id(submission_id),
                "time": int(make_timestamp(submission.timestamp))}
            # We're sending the unrounded score to RWS
            if score is not None:
                subchange_put_data["score"] = score
            if ranking_score_details is not None:
                subchange_put_data["extra"] = ranking_score_details
            subchanges.append((subchange_id, subchange_put_data))

    # Adding operations to the queue.  The lock protects the queue
    # shared with the thread that actually talks to the rankings.
    with self.operation_queue_lock:
        for ranking in self.rankings:
            for subchange_id, data in subchanges:
                self.subchange_queue.setdefault(
                    ranking, dict())[encode_id(subchange_id)] = data
def get(self, task_id):
    """Render the page of a single task, listing the current
    user's submissions for it.

    task_id (int): id of the task to show.

    """
    self.r_params["task"] = Task.get_from_id(task_id, self.sql_session)
    task = self.r_params["task"]
    # Reject unknown tasks and tasks belonging to other contests.
    if task is None or task.contest != self.contest:
        raise tornado.web.HTTPError(404)
    user_submissions = self.sql_session.query(Submission) \
        .filter_by(user=self.current_user) \
        .filter_by(task=task) \
        .all()
    self.r_params["submissions"] = user_submissions
    self.render("task.html", **self.r_params)
def extract_complexity(task_id, file_lengther=None):
    """Extract the complexity of all submissions of the task.

    The results are stored in a file task_<id>.info.

    task_id (int): the id of the task we are interested in.
    file_lengther (class): a File-like object that tell the
        dimension of the input (see example above for how to
        write one).

    return (int): 0 if operation was successful, -1 if the task
        does not exist.

    """
    with SessionGen() as session:
        task = Task.get_from_id(task_id, session)
        if task is None:
            return -1

        # Extracting the length of the testcase.
        file_cacher = FileCacher()
        testcases_lengths = [
            file_length(testcase.input, file_cacher, file_lengther)
            for testcase in task.testcases
        ]
        file_cacher.purge_cache()

        # Compute the complexity of the solutions.
        with open("task_%s.info" % task_id, "wt") as info:
            for submission in task.contest.get_submissions():
                if submission.task_id == task_id and \
                        submission.evaluated():
                    # Progress output on stdout (Python 2 syntax).
                    print submission.user.username
                    result = extract_complexity_submission(
                        testcases_lengths, submission)
                    # result[1] is the fitted complexity; skip
                    # submissions for which no fit was found.
                    if result[1] is None:
                        continue
                    # The fields below are written without newlines so
                    # that each submission occupies one line, closed by
                    # the final "\n".
                    info.write("Submission: %s" % submission.id)
                    info.write(" - user: %15s" % submission.user.username)
                    info.write(" - task: %s" % task.name)
                    if result[0] is not None:
                        # The "l" length modifier is accepted and
                        # ignored by Python's %-formatting.
                        info.write(" - score: %6.2lf" % result[0])
                    info.write(" - complexity: %20s" %
                               complexity_to_string(result[1]))
                    if result[2] is not None:
                        info.write(" - confidence %5.1lf" % result[2])
                    info.write("\n")
    return 0
def dataset_updated(self, task_id):
    """Resend submission scores to RWS after a task's live
    dataset has been changed.

    task_id (int): id of the task whose dataset has changed.

    """
    with SessionGen(commit=False) as sql_session:
        task = Task.get_from_id(task_id, sql_session)
        dataset = task.active_dataset
        logger.info("Dataset update for task %d (dataset now is %d)." %
                    (task.id, dataset.id))
        # Each submission's score may differ under the new live
        # dataset, so push every one of them to RWS again.
        for sub in task.submissions:
            self.rankings_send_score(sub)
def extract_complexity(task_id, file_lengther=None):
    """Extract the complexity of all submissions of the task.

    The results are stored in a file task_<id>.info.

    task_id (int): the id of the task we are interested in.
    file_lengther (class): a File-like object that tell the
        dimension of the input (see example above for how to
        write one).

    return (int): 0 if operation was successful, -1 when the
        task cannot be found.

    """
    with SessionGen() as session:
        task = Task.get_from_id(task_id, session)
        if task is None:
            return -1

        # Extracting the length of the testcase.
        file_cacher = FileCacher()
        testcases_lengths = [file_length(testcase.input,
                                         file_cacher, file_lengther)
                             for testcase in task.testcases]
        file_cacher.purge_cache()

        # Compute the complexity of the solutions.
        with open("task_%s.info" % task_id, "wt") as info:
            for submission in task.contest.get_submissions():
                if submission.task_id == task_id and \
                        submission.evaluated():
                    # Progress feedback on stdout (Python 2 print).
                    print submission.user.username
                    result = extract_complexity_submission(testcases_lengths,
                                                           submission)
                    # No newline between fields: one record per line,
                    # terminated by the explicit "\n" below.
                    if result[1] is None:
                        # No complexity could be fitted for this one.
                        continue
                    info.write("Submission: %s" % submission.id)
                    info.write(" - user: %15s" % submission.user.username)
                    info.write(" - task: %s" % task.name)
                    if result[0] is not None:
                        info.write(" - score: %6.2lf" % result[0])
                    info.write(" - complexity: %20s" %
                               complexity_to_string(result[1]))
                    if result[2] is not None:
                        info.write(" - confidence %5.1lf" % result[2])
                    info.write("\n")
    return 0
def post(self, task_id):
    """Receive a new submission for the given task.

    Validates the uploaded files (rate limit, file names, archive
    handling, submission format, language consistency, size), then
    stores them in the file cacher and records the Submission in
    the database, notifying the evaluation service.

    task_id (int): id of the task being submitted to.

    """
    self.timestamp = self.r_params["timestamp"]
    self.task_id = task_id
    self.task = Task.get_from_id(task_id, self.sql_session)
    if self.current_user is None or self.task is None or self.task.contest != self.contest:
        raise tornado.web.HTTPError(404)

    # Enforce minimum time between submissions for the same task.
    last_submission = (
        self.sql_session.query(Submission)
        .filter_by(task_id=self.task.id)
        .filter_by(user_id=self.current_user.id)
        .order_by(Submission.timestamp.desc())
        .first()
    )
    if last_submission is not None and self.timestamp - last_submission.timestamp < config.min_submission_interval:
        self.application.service.add_notification(
            self.current_user.username,
            int(time.time()),
            self._("Submissions too frequent!"),
            self._("For each task, you can submit "
                   "again after %s seconds from last submission.") % config.min_submission_interval,
        )
        self.redirect("/tasks/%s" % encrypt_number(self.task.id))
        return

    # Ensure that the user did not submit multiple files with the
    # same name.
    if any(len(x) != 1 for x in self.request.files.values()):
        self.application.service.add_notification(
            self.current_user.username,
            int(time.time()),
            self._("Invalid submission format!"),
            self._("Please select the correct files."),
        )
        self.redirect("/tasks/%s" % encrypt_number(self.task.id))
        return

    # If the user submitted an archive, extract it and use content
    # as request.files.  (keys()[0] is Python 2: dict views are
    # lists there.)
    if len(self.request.files) == 1 and self.request.files.keys()[0] == "submission":
        archive_data = self.request.files["submission"][0]
        del self.request.files["submission"]

        # Extract the files from the archive.
        # NOTE(review): mkstemp's first positional parameter is
        # `suffix`, not `dir` — this was presumably meant as
        # tempfile.mkstemp(dir=config.temp_dir); confirm.
        temp_archive_file, temp_archive_filename = tempfile.mkstemp(config.temp_dir)
        with os.fdopen(temp_archive_file, "w") as temp_archive_file:
            temp_archive_file.write(archive_data["body"])

        archive_contents = extract_archive(temp_archive_filename,
                                           archive_data["filename"])

        if archive_contents is None:
            self.application.service.add_notification(
                self.current_user.username,
                int(time.time()),
                self._("Invalid archive format!"),
                self._("The submitted archive could not be opened."),
            )
            self.redirect("/tasks/%s" % encrypt_number(self.task.id))
            return

        # Replace the archive with its contents in request.files.
        for item in archive_contents:
            self.request.files[item["filename"]] = [item]

    # This ensures that the user sent one file for every name in
    # submission format and no more. Less is acceptable if task
    # type says so.
    task_type = get_task_type(task=self.task)
    required = set([x.filename for x in self.task.submission_format])
    provided = set(self.request.files.keys())
    if not (required == provided or (task_type.ALLOW_PARTIAL_SUBMISSION and required.issuperset(provided))):
        self.application.service.add_notification(
            self.current_user.username,
            int(time.time()),
            self._("Invalid submission format!"),
            self._("Please select the correct files."),
        )
        self.redirect("/tasks/%s" % encrypt_number(self.task.id))
        return

    # Add submitted files. After this, self.files is a dictionary
    # indexed by *our* filenames (something like "output01.txt" or
    # "taskname.%l", and whose value is a couple
    # (user_assigned_filename, content).
    self.files = {}
    for uploaded, data in self.request.files.iteritems():
        self.files[uploaded] = (data[0]["filename"], data[0]["body"])

    # If we allow partial submissions, implicitly we recover the
    # non-submitted files from the previous submission. And put
    # them in self.file_digests (i.e., like they have already been
    # sent to FS).
    self.submission_lang = None
    self.file_digests = {}
    self.retrieved = 0
    if task_type.ALLOW_PARTIAL_SUBMISSION and last_submission is not None:
        for filename in required.difference(provided):
            if filename in last_submission.files:
                # If we retrieve a language-dependent file from
                # the last submission, note that the language must
                # be the same.
                if "%l" in filename:
                    self.submission_lang = last_submission.language
                self.file_digests[filename] = last_submission.files[filename].digest
                self.retrieved += 1

    # We need to ensure that every time we have a .%l in our
    # filenames, the user has a recognized source extension, and
    # that all these are the same (i.e., no mixed-language
    # submissions).
    def which_language(user_filename):
        """Determine the language of user_filename from its
        extension.

        user_filename (string): the file to test.

        return (string): the language matching the extension of
            user_filename, or None if it is not recognized.

        """
        extension = os.path.splitext(user_filename)[1]
        try:
            return Submission.LANGUAGES_MAP[extension]
        except KeyError:
            return None

    error = None
    for our_filename in self.files:
        user_filename = self.files[our_filename][0]
        if our_filename.find(".%l") != -1:
            lang = which_language(user_filename)
            if lang is None:
                error = self._("Cannot recognize submission's language.")
                break
            elif self.submission_lang is not None and self.submission_lang != lang:
                error = self._("All sources must be in the same language.")
                break
            else:
                self.submission_lang = lang
    if error is not None:
        self.application.service.add_notification(
            self.current_user.username,
            int(time.time()),
            self._("Invalid submission!"),
            error
        )
        self.redirect("/tasks/%s" % encrypt_number(self.task.id))
        return

    # Check if submitted files are small enough.
    if any([len(f[1]) > config.max_submission_length for f in self.files.values()]):
        self.application.service.add_notification(
            self.current_user.username,
            int(time.time()),
            self._("Submission too big!"),
            self._("Each files must be at most %d bytes long.") % config.max_submission_length,
        )
        self.redirect("/tasks/%s" % encrypt_number(self.task.id))
        return

    # All checks done, submission accepted.

    # Attempt to store the submission locally to be able to
    # recover a failure.
    self.local_copy_saved = False

    if config.submit_local_copy:
        try:
            path = os.path.join(
                config.submit_local_copy_path.replace("%s", config.data_dir),
                self.current_user.username
            )
            if not os.path.exists(path):
                os.makedirs(path)
            with codecs.open(os.path.join(path, str(self.timestamp)), "w", "utf-8") as file_:
                pickle.dump((self.contest.id, self.current_user.id, self.task, self.files), file_)
            self.local_copy_saved = True
        except Exception as error:
            # Best-effort: a failed local copy does not block the
            # submission, it only removes the recovery fallback.
            logger.error("Submission local copy failed - %s" % traceback.format_exc())

    # Copy the username before closing the session: the user
    # object becomes detached afterwards.
    self.username = self.current_user.username
    self.sql_session.close()

    # We now have to send all the files to the destination...
    try:
        for filename in self.files:
            digest = self.application.service.file_cacher.put_file(
                description="Submission file %s sent by %s at %d." % (filename, self.username, self.timestamp),
                binary_data=self.files[filename][1],
            )
            self.file_digests[filename] = digest
    # In case of error, the server aborts the submission
    except Exception as error:
        logger.error("Storage failed! %s" % error)
        if self.local_copy_saved:
            message = "In case of emergency, this server has a local copy."
        else:
            message = "No local copy stored! Your submission was ignored."
        self.application.service.add_notification(
            self.username,
            int(time.time()),
            self._("Submission storage failed!"),
            self._(message)
        )
        self.redirect("/tasks/%s" % encrypt_number(self.task_id))
        # NOTE(review): there is no `return` after this redirect,
        # so control falls through and the submission is still
        # recorded below even though storage failed — this looks
        # like a missing `return`; confirm against upstream.

    # All the files are stored, ready to submit!
    # A fresh session is opened since the previous one was closed
    # before the (potentially slow) file transfer.
    self.sql_session = Session()
    current_user = self.get_current_user()
    self.task = Task.get_from_id(self.task_id, self.sql_session)
    logger.info("All files stored for submission sent by %s" % self.username)
    submission = Submission(
        user=current_user,
        task=self.task,
        timestamp=self.timestamp,
        files={},
        language=self.submission_lang
    )

    for filename, digest in self.file_digests.items():
        self.sql_session.add(File(digest, filename, submission))
    self.sql_session.add(submission)
    self.sql_session.commit()

    self.r_params["submission"] = submission
    self.r_params["warned"] = False

    # Hand the new submission over to the evaluation service.
    self.application.service.evaluation_service.new_submission(submission_id=submission.id)
    self.application.service.add_notification(
        self.username,
        int(time.time()),
        self._("Submission received"),
        self._("Your submission has been received "
               "and is currently being evaluated."),
    )
    # The argument (encripted submission id) is not used by CWS
    # (nor it discloses information to the user), but it is useful
    # for automatic testing to obtain the submission id).
    self.redirect("/tasks/%s?%s" % (encrypt_number(self.task.id),
                                    encrypt_number(submission.id)))