class AddManagerHandler(BaseHandler):
    """Add a manager to a task.

    """
    def get(self, task_id):
        task = self.safe_get_item(Task, task_id)
        self.contest = task.contest
        r_params = self.render_params()
        r_params["task"] = task
        self.render("add_manager.html", **r_params)

    def post(self, task_id):
        task = self.safe_get_item(Task, task_id)
        self.contest = task.contest
        manager = self.request.files["manager"][0]
        task_name = task.name
        self.sql_session.close()

        try:
            digest = self.application.service.file_cacher.put_file(
                binary_data=manager["body"],
                description="Task manager for %s" % task_name)
        except Exception as error:
            self.application.service.add_notification(
                int(time.time()),
                "Manager storage failed",
                repr(error))
            self.redirect("/add_manager/%s" % task_id)
            return

        self.sql_session = Session()
        task = self.safe_get_item(Task, task_id)
        self.sql_session.add(Manager(digest, manager["filename"], task))
        self.sql_session.commit()
        self.redirect("/task/%s" % task_id)
class AddStatementHandler(BaseHandler):
    """Add a statement to a task.

    """
    @catch_exceptions
    def get(self, task_id):
        task = self.safe_get_item(Task, task_id)
        self.contest = task.contest
        r_params = self.render_params()
        r_params["task"] = task
        self.render("add_statement.html", **r_params)

    @catch_exceptions
    def post(self, task_id):
        task = self.safe_get_item(Task, task_id)
        self.contest = task.contest

        language = self.get_argument("language", None)
        if language is None:
            self.application.service.add_notification(
                int(time.time()),
                "No language code specified",
                "The language code can be any string.")
            self.redirect("/add_statement/%s" % task_id)
            return

        statement = self.request.files["statement"][0]
        if not statement["filename"].endswith(".pdf"):
            self.application.service.add_notification(
                int(time.time()),
                "Invalid task statement",
                "The task statement must be a .pdf file.")
            self.redirect("/add_statement/%s" % task_id)
            return

        task_name = task.name
        self.sql_session.close()

        try:
            digest = self.application.service.file_cacher.put_file(
                binary_data=statement["body"],
                description="Statement for task %s (lang: %s)" %
                            (task_name, language))
        except Exception as error:
            self.application.service.add_notification(
                int(time.time()),
                "Task statement storage failed",
                repr(error))
            self.redirect("/add_statement/%s" % task_id)
            return

        # TODO verify that there's no other Statement with that language
        # otherwise we'd trigger an IntegrityError for constraint violation
        self.sql_session = Session()
        task = self.safe_get_item(Task, task_id)
        statement = Statement(digest, language, task)
        self.sql_session.add(statement)
        self.sql_session.commit()
        self.redirect("/task/%s" % task_id)
def post(self, task_id):
    task = self.safe_get_item(Task, task_id)
    self.contest = task.contest
    _input = self.request.files["input"][0]
    output = self.request.files["output"][0]
    public = self.get_argument("public", None) is not None
    task_name = task.name
    self.sql_session.close()

    try:
        input_digest = self.application.service.file_cacher.put_file(
            binary_data=_input["body"],
            description="Testcase input for task %s" % task_name)
        output_digest = self.application.service.file_cacher.put_file(
            binary_data=output["body"],
            description="Testcase output for task %s" % task_name)
    except Exception as error:
        self.application.service.add_notification(
            int(time.time()),
            "Testcase storage failed",
            repr(error))
        self.redirect("/add_testcase/%s" % task_id)
        return

    self.sql_session = Session()
    task = self.safe_get_item(Task, task_id)
    self.contest = task.contest
    self.sql_session.add(Testcase(
        input_digest, output_digest, len(task.testcases), public, task))
    self.sql_session.commit()
    self.redirect("/task/%s" % task_id)
def post(self, task_id):
    task = self.safe_get_item(Task, task_id)
    self.contest = task.contest
    attachment = self.request.files["attachment"][0]
    task_name = task.name
    self.sql_session.close()

    try:
        digest = self.application.service.file_cacher.put_file(
            binary_data=attachment["body"],
            description="Task attachment for %s" % task_name)
    except Exception as error:
        self.application.service.add_notification(
            int(time.time()),
            "Attachment storage failed",
            repr(error))
        self.redirect("/add_attachment/%s" % task_id)
        return

    # TODO verify that there's no other Attachment with that filename
    # otherwise we'd trigger an IntegrityError for constraint violation
    self.sql_session = Session()
    task = self.safe_get_item(Task, task_id)
    self.sql_session.add(Attachment(digest, attachment["filename"], task))
    self.sql_session.commit()
    self.redirect("/task/%s" % task_id)
def post(self, task_id):
    task = self.safe_get_item(Task, task_id)
    self.contest = task.contest

    statement = self.request.files["statement"][0]
    if not statement["filename"].endswith(".pdf"):
        self.application.service.add_notification(
            int(time.time()),
            "Invalid task statement",
            "The task statement must be a .pdf file.")
        self.redirect("/add_statement/%s" % task_id)
        return

    task_name = task.name
    self.sql_session.close()

    try:
        digest = self.application.service.file_cacher.put_file(
            binary_data=statement["body"],
            description="Task statement for %s" % task_name)
    except Exception as error:
        self.application.service.add_notification(
            int(time.time()),
            "Task statement storage failed",
            repr(error))
        self.redirect("/add_statement/%s" % task_id)
        return

    self.sql_session = Session()
    task = self.safe_get_item(Task, task_id)
    task.statement = digest
    self.sql_session.commit()
    self.redirect("/task/%s" % task_id)
def prepare(self):
    """This method is executed at the beginning of each request.

    """
    self.set_header("Cache-Control", "no-cache, must-revalidate")
    self.sql_session = Session()
    self.contest = Contest.get_from_id(self.application.service.contest,
                                       self.sql_session)

    localization_dir = os.path.join(os.path.dirname(__file__), "mo")
    if os.path.exists(localization_dir):
        tornado.locale.load_gettext_translations(localization_dir, "cms")

    self._ = self.get_browser_locale().translate
    self.r_params = self.render_params()
def prepare(self):
    """This method is executed at the beginning of each request.

    """
    # Attempt to update the contest and all its references.
    # If this fails, the request terminates.
    self.set_header("Cache-Control", "no-cache, must-revalidate")
    self.sql_session = Session()
    self.sql_session.expire_all()
    self.contest = None

    localization_dir = os.path.join(os.path.dirname(__file__), "mo")
    if os.path.exists(localization_dir):
        tornado.locale.load_gettext_translations(localization_dir, "cms")
class BaseHandler(CommonRequestHandler):
    """Base RequestHandler for this application.

    All the RequestHandler classes in this application should be a
    child of this class.

    """
    # Whether the login cookie duration has to be refreshed when
    # this handler is called. Useful to filter asynchronous
    # requests.
    refresh_cookie = True

    @catch_exceptions
    def prepare(self):
        """This method is executed at the beginning of each request.

        """
        self.set_header("Cache-Control", "no-cache, must-revalidate")
        self.sql_session = Session()
        self.contest = Contest.get_from_id(self.application.service.contest,
                                           self.sql_session)

        localization_dir = os.path.join(os.path.dirname(__file__), "mo")
        if os.path.exists(localization_dir):
            tornado.locale.load_gettext_translations(localization_dir, "cms")

        self._ = self.get_browser_locale().translate
        self.r_params = self.render_params()

    def get_current_user(self):
        """Get the current user logged in from the cookies.

        If a valid cookie is retrieved, return a User object with the
        username specified in the cookie. Otherwise, return None.

        """
        timestamp = time.time()

        if self.get_secure_cookie("login") is None:
            return None
        try:
            cookie = pickle.loads(self.get_secure_cookie("login"))
            username = str(cookie[0])
            last_update = int(cookie[1])
        except:
            self.clear_cookie("login")
            return None

        # Check if the cookie is expired.
        if timestamp - last_update > config.cookie_duration:
            self.clear_cookie("login")
            return None

        user = self.sql_session.query(User)\
            .filter_by(contest=self.contest)\
            .filter_by(username=username).first()
        if user is None:
            self.clear_cookie("login")
            return None

        if self.refresh_cookie:
            self.set_secure_cookie("login",
                                   pickle.dumps((user.username,
                                                 int(time.time()))),
                                   expires_days=None)

        # If this is the first time we see the user during the active
        # phase of the contest, we note that his/her time starts from
        # now.
        if self.contest.phase(timestamp) == 0 and \
                user.starting_time is None:
            logger.info("Starting now for user %s" % user.username)
            user.starting_time = timestamp
            self.sql_session.commit()

        return user

    def render_params(self):
        """Return the default render params used by almost all handlers.

        return (dict): default render params

        """
        ret = {}
        ret["timestamp"] = int(time.time())
        ret["contest"] = self.contest
        ret["url_root"] = get_url_root(self.request.path)
        ret["valid_phase_end"] = self.contest.stop

        if self.contest is not None:
            ret["phase"] = self.contest.phase(ret["timestamp"])
            # If we have a user logged in, the contest may end before
            # contest.stop if the user has finished the time allocated
            # for him/her.
            if ret["phase"] == 0 and \
                    self.current_user is not None and \
                    self.contest.per_user_time is not None:
                delta = ret["timestamp"] - self.current_user.starting_time
                if delta >= self.contest.per_user_time:
                    ret["phase"] = 1
                user_end_time = (self.current_user.starting_time +
                                 self.contest.per_user_time)
                if user_end_time < self.contest.stop:
                    ret["valid_phase_end"] = user_end_time

        ret["contest_list"] = self.sql_session.query(Contest).all()
        ret["cookie"] = str(self.cookies)
        return ret

    def finish(self, *args, **kwds):
        """Finish this response, ending the HTTP request.

        We override this method in order to properly close the database.

        """
        if hasattr(self, "sql_session"):
            logger.debug("Closing SQL connection.")
            try:
                self.sql_session.close()
            except Exception as error:
                logger.warning("Couldn't close SQL connection: %r" % error)
        tornado.web.RequestHandler.finish(self, *args, **kwds)
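For context, the "login" cookie that get_current_user() unpickles is the same (username, timestamp) pair that the refresh branch above writes back. A login handler would presumably issue it the same way after checking credentials; the sketch below is illustrative only (the handler, its URL and the omitted credential check are assumptions, not part of the code above) and relies on the same pickle/time imports already used by BaseHandler.

class LoginHandler(BaseHandler):
    """Hypothetical sketch: issue the login cookie in the format that
    BaseHandler.get_current_user() expects.

    """
    def post(self):
        username = self.get_argument("username", "")
        # A real handler would verify the credentials against the User
        # table here; omitted because it is not shown in the code above.
        self.set_secure_cookie("login",
                               pickle.dumps((username, int(time.time()))),
                               expires_days=None)
        self.redirect("/")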
def post(self, task_id):
    self.timestamp = self.r_params["timestamp"]
    self.task_id = task_id
    self.task = Task.get_from_id(task_id, self.sql_session)
    if self.current_user is None or \
            self.task is None or \
            self.task.contest != self.contest:
        raise tornado.web.HTTPError(404)

    # Enforce minimum time between submissions for the same task.
    last_submission = self.sql_session.query(Submission)\
        .filter_by(task_id=self.task.id)\
        .filter_by(user_id=self.current_user.id)\
        .order_by(Submission.timestamp.desc()).first()
    if last_submission is not None and \
            self.timestamp - last_submission.timestamp < \
            config.min_submission_interval:
        self.application.service.add_notification(
            self.current_user.username,
            int(time.time()),
            self._("Submissions too frequent!"),
            self._("For each task, you can submit "
                   "again after %s seconds from last submission.") %
            config.min_submission_interval)
        self.redirect("/tasks/%s" % encrypt_number(self.task.id))
        return

    # Ensure that the user did not submit multiple files with the
    # same name.
    if any(len(x) != 1 for x in self.request.files.values()):
        self.application.service.add_notification(
            self.current_user.username,
            int(time.time()),
            self._("Invalid submission format!"),
            self._("Please select the correct files."))
        self.redirect("/tasks/%s" % encrypt_number(self.task.id))
        return

    # If the user submitted an archive, extract it and use its content
    # as request.files.
    if len(self.request.files) == 1 and \
            self.request.files.keys()[0] == "submission":
        archive_data = self.request.files["submission"][0]
        del self.request.files["submission"]

        # Extract the files from the archive.
        temp_archive_file, temp_archive_filename = \
            tempfile.mkstemp(dir=config.temp_dir)
        with os.fdopen(temp_archive_file, "wb") as temp_archive_file:
            temp_archive_file.write(archive_data["body"])

        archive_contents = extract_archive(temp_archive_filename,
                                           archive_data["filename"])

        if archive_contents is None:
            self.application.service.add_notification(
                self.current_user.username,
                int(time.time()),
                self._("Invalid archive format!"),
                self._("The submitted archive could not be opened."))
            self.redirect("/tasks/%s" % encrypt_number(self.task.id))
            return

        for item in archive_contents:
            self.request.files[item["filename"]] = [item]

    # This ensures that the user sent one file for every name in the
    # submission format and no more. Fewer is acceptable if the task
    # type says so.
    task_type = get_task_type(task=self.task)
    required = set([x.filename for x in self.task.submission_format])
    provided = set(self.request.files.keys())
    if not (required == provided or
            (task_type.ALLOW_PARTIAL_SUBMISSION and
             required.issuperset(provided))):
        self.application.service.add_notification(
            self.current_user.username,
            int(time.time()),
            self._("Invalid submission format!"),
            self._("Please select the correct files."))
        self.redirect("/tasks/%s" % encrypt_number(self.task.id))
        return

    # Add submitted files. After this, self.files is a dictionary
    # indexed by *our* filenames (something like "output01.txt" or
    # "taskname.%l"), and whose value is a couple
    # (user_assigned_filename, content).
    self.files = {}
    for uploaded, data in self.request.files.iteritems():
        self.files[uploaded] = (data[0]["filename"], data[0]["body"])

    # If we allow partial submissions, we implicitly recover the
    # non-submitted files from the previous submission and put them in
    # self.file_digests (i.e., as if they had already been sent to FS).
    self.submission_lang = None
    self.file_digests = {}
    self.retrieved = 0
    if task_type.ALLOW_PARTIAL_SUBMISSION and last_submission is not None:
        for filename in required.difference(provided):
            if filename in last_submission.files:
                # If we retrieve a language-dependent file from the
                # last submission, we take note that the language must
                # be the same.
                if "%l" in filename:
                    self.submission_lang = last_submission.language
                self.file_digests[filename] = \
                    last_submission.files[filename].digest
                self.retrieved += 1

    # We need to ensure that every time we have a ".%l" in our
    # filenames, the user has one amongst ".cpp", ".c", or ".pas",
    # and that all these are the same (i.e., no mixed-language
    # submissions).
    def which_language(user_filename):
        """Determine the language of user_filename from its extension.

        user_filename (string): the file to test.

        return (string): the extension of user_filename, or None if it
                         is not a recognized language.

        """
        extension = os.path.splitext(user_filename)[1]
        try:
            return Submission.LANGUAGES_MAP[extension]
        except KeyError:
            return None

    error = None
    for our_filename in self.files:
        user_filename = self.files[our_filename][0]
        if our_filename.find(".%l") != -1:
            lang = which_language(user_filename)
            if lang is None:
                error = self._("Cannot recognize submission's language.")
                break
            elif self.submission_lang is not None and \
                    self.submission_lang != lang:
                error = self._("All sources must be in the same language.")
                break
            else:
                self.submission_lang = lang
    if error is not None:
        self.application.service.add_notification(
            self.current_user.username,
            int(time.time()),
            self._("Invalid submission!"),
            error)
        self.redirect("/tasks/%s" % encrypt_number(self.task.id))
        return

    # Check if submitted files are small enough.
    if any([len(f[1]) > config.max_submission_length
            for f in self.files.values()]):
        self.application.service.add_notification(
            self.current_user.username,
            int(time.time()),
            self._("Submission too big!"),
            self._("Each file must be at most %d bytes long.") %
            config.max_submission_length)
        self.redirect("/tasks/%s" % encrypt_number(self.task.id))
        return

    # All checks done, submission accepted.

    # Attempt to store the submission locally to be able to recover
    # from a failure.
    self.local_copy_saved = False
    if config.submit_local_copy:
        try:
            path = os.path.join(
                config.submit_local_copy_path.replace("%s",
                                                      config.data_dir),
                self.current_user.username)
            if not os.path.exists(path):
                os.makedirs(path)
            with codecs.open(os.path.join(path, str(self.timestamp)),
                             "w", "utf-8") as file_:
                pickle.dump((self.contest.id,
                             self.current_user.id,
                             self.task,
                             self.files),
                            file_)
            self.local_copy_saved = True
        except Exception as error:
            logger.error("Submission local copy failed - %s" %
                         traceback.format_exc())

    self.username = self.current_user.username
    self.sql_session.close()

    # We now have to send all the files to the destination...
    try:
        for filename in self.files:
            digest = self.application.service.file_cacher.put_file(
                description="Submission file %s sent by %s at %d." % (
                    filename,
                    self.username,
                    self.timestamp),
                binary_data=self.files[filename][1])
            self.file_digests[filename] = digest

    # In case of error, the server aborts the submission.
    except Exception as error:
        logger.error("Storage failed! %s" % error)
        if self.local_copy_saved:
            message = "In case of emergency, this server has a local copy."
        else:
            message = "No local copy stored! Your submission was ignored."
        self.application.service.add_notification(
            self.username,
            int(time.time()),
            self._("Submission storage failed!"),
            self._(message))
        self.redirect("/tasks/%s" % encrypt_number(self.task_id))
        return

    # All the files are stored, ready to submit!
    self.sql_session = Session()
    current_user = self.get_current_user()
    self.task = Task.get_from_id(self.task_id, self.sql_session)
    logger.info("All files stored for submission sent by %s" %
                self.username)
    submission = Submission(user=current_user,
                            task=self.task,
                            timestamp=self.timestamp,
                            files={},
                            language=self.submission_lang)

    for filename, digest in self.file_digests.items():
        self.sql_session.add(File(digest, filename, submission))
    self.sql_session.add(submission)
    self.sql_session.commit()

    self.r_params["submission"] = submission
    self.r_params["warned"] = False
    self.application.service.evaluation_service.new_submission(
        submission_id=submission.id)
    self.application.service.add_notification(
        self.username,
        int(time.time()),
        self._("Submission received"),
        self._("Your submission has been received "
               "and is currently being evaluated."))
    # The argument (encrypted submission id) is not used by CWS (nor
    # does it disclose information to the user), but it is useful for
    # automatic testing to obtain the submission id.
    self.redirect("/tasks/%s?%s" % (encrypt_number(self.task.id),
                                    encrypt_number(submission.id)))
class BaseHandler(CommonRequestHandler):
    """Base RequestHandler for this application.

    All the RequestHandler classes in this application should be a
    child of this class.

    """
    def safe_get_item(self, cls, ident, session=None):
        """Get the item of class cls with id ident from the database,
        using session if given, or self.sql_session otherwise. If the
        id is not found, raise a 404.

        cls (class): class of object to retrieve.
        ident (string): id of object.
        session (session/None): session to use.

        return (object/404): the object with the given id, or 404.

        """
        if session is None:
            session = self.sql_session
        entity = cls.get_from_id(ident, session)
        if entity is None:
            raise tornado.web.HTTPError(404)
        return entity

    def prepare(self):
        """This method is executed at the beginning of each request.

        """
        # Attempt to update the contest and all its references.
        # If this fails, the request terminates.
        self.set_header("Cache-Control", "no-cache, must-revalidate")
        self.sql_session = Session()
        self.sql_session.expire_all()
        self.contest = None

        localization_dir = os.path.join(os.path.dirname(__file__), "mo")
        if os.path.exists(localization_dir):
            tornado.locale.load_gettext_translations(localization_dir, "cms")

    def render_params(self):
        """Return the default render params used by almost all handlers.

        return (dict): default render params

        """
        params = {}
        params["timestamp"] = int(time.time())
        params["contest"] = self.contest
        params["url_root"] = get_url_root(self.request.path)
        if self.contest is not None:
            params["phase"] = self.contest.phase(params["timestamp"])
            # Keep "== None" and "== False" in the filter arguments
            # (SQLAlchemy translates them to SQL).
            params["unanswered"] = self.sql_session.query(Question)\
                .join(User)\
                .filter(User.contest_id == self.contest.id)\
                .filter(Question.reply_timestamp == None)\
                .filter(Question.ignored == False)\
                .count()
        params["contest_list"] = self.sql_session.query(Contest).all()
        params["cookie"] = str(self.cookies)
        return params

    def finish(self, *args, **kwds):
        """Finish this response, ending the HTTP request.

        We override this method in order to properly close the database.

        """
        logger.debug("Closing SQL connection.")
        self.sql_session.close()
        tornado.web.RequestHandler.finish(self, *args, **kwds)

    def get_non_negative_int(self, argument_name, default, allow_empty=True):
        """Get a non-negative integer from the arguments.

        Use default if the argument is missing; if allow_empty is
        False, empty values such as "" and None are not permitted.

        Raise ValueError if the argument can't be converted into a
        non-negative integer.

        """
        argument = self.get_argument(argument_name, repr(default))
        if allow_empty and \
                (argument is None or argument == "" or argument == "None"):
            return None
        try:
            argument = int(argument)
        except:
            raise ValueError("%s: can't cast %s to int." %
                             (argument_name, argument))
        if argument < 0:
            raise ValueError("%s is negative." % argument_name)
        return argument
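As a rough usage illustration, a hypothetical admin handler could combine safe_get_item(), get_non_negative_int() and render_params() as below; the handler name, template name and "limit" argument are made up for the example and are not part of the code above.

class ExampleTaskHandler(BaseHandler):
    """Hypothetical handler showing how the helpers above combine.

    """
    def get(self, task_id):
        # Raises a 404 automatically if no Task has this id.
        task = self.safe_get_item(Task, task_id)
        self.contest = task.contest
        # Optional non-negative integer argument; None when left empty.
        limit = self.get_non_negative_int("limit", None)
        r_params = self.render_params()
        r_params["task"] = task
        r_params["limit"] = limit
        self.render("example.html", **r_params)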
def run(contest_id):
    session = Session()
    contest = Contest.get_from_id(contest_id, session)

    task_by_team = set()
    task_by_lang = set()

    task_dir = os.path.join(os.path.dirname(__file__), "tasks")
    for t in os.listdir(task_dir):
        if t.endswith('.json'):
            task = t[:-5]
            task_path = os.path.join(task_dir, t)
            with open(task_path) as task_file:
                data = json.load(task_file)
                if "teams" in data:
                    for team, v in data["teams"].iteritems():
                        for lang in v:
                            task_by_team.add((task, lang, team))
                if "langs" in data:
                    for lang, v in data["langs"].iteritems():
                        for team in v:
                            task_by_lang.add((task, lang, team))

    if task_by_team != task_by_lang:
        print "ERROR: data in 'tasks' is not self-consistent"
        print repr(task_by_team - task_by_lang)
        print repr(task_by_lang - task_by_team)
        return

    team_by_task = set()
    team_by_lang = set()

    team_dir = os.path.join(os.path.dirname(__file__), "teams")
    for t in os.listdir(team_dir):
        if t.endswith('.json'):
            team = t[:-5]
            team_path = os.path.join(team_dir, t)
            with open(team_path) as team_file:
                data = json.load(team_file)
                if "tasks" in data:
                    for task, v in data["tasks"].iteritems():
                        for lang in v:
                            team_by_task.add((task, lang, team))
                if "langs" in data:
                    for lang, v in data["langs"].iteritems():
                        for task in v:
                            team_by_lang.add((task, lang, team))

    if team_by_task != team_by_lang:
        print "ERROR: data in 'teams' is not self-consistent"
        print repr(team_by_task - team_by_lang)
        print repr(team_by_lang - team_by_task)
        return

    if task_by_team != team_by_task:
        print "ERROR: data in 'tasks' and 'teams' is different"
        print repr(task_by_team - team_by_task)
        print repr(team_by_task - task_by_team)
        return

    data_by_lang = set()
    data_by_team = set()

    data_dir = os.path.join(os.path.dirname(__file__), "data")
    for task in os.listdir(data_dir):
        if os.path.isdir(os.path.join(data_dir, task)):
            for f in os.listdir(os.path.join(data_dir, task, "by_lang")):
                # f == "lang (team).pdf"
                lang, team = re.findall(
                    r"^([A-Za-z0-9_]+) \(([A-Za-z0-9_]+)\)\.pdf$", f)[0]
                data_by_lang.add((task, lang, team))
            for f in os.listdir(os.path.join(data_dir, task, "by_team")):
                # f == "team (lang).pdf"
                team, lang = re.findall(
                    r"^([A-Za-z0-9_]+) \(([A-Za-z0-9_]+)\)\.pdf$", f)[0]
                data_by_team.add((task, lang, team))

    if data_by_lang != data_by_team:
        print "ERROR: PDF files in 'data' are not complete"
        print repr(data_by_lang - data_by_team)
        print repr(data_by_team - data_by_lang)
        return

    if task_by_team != data_by_lang:
        print "ERROR: PDF files in 'data' do not match JSON data"
        print repr(task_by_team - data_by_lang)
        print repr(data_by_lang - task_by_team)
        return

    print "Hooray! Data is consistent!"

    # Pick one at random: they're all equal.
    translations = task_by_team

    # Determine language codes used in CMS.
    codes = dict()

    # Read JSON files in 'tasks' again as it provides data already
    # grouped as we need it, and not simply as a list of tuples.
    for t in os.listdir(task_dir):
        if t.endswith('.json'):
            task = t[:-5]
            task_path = os.path.join(task_dir, t)
            with open(task_path) as task_file:
                data = json.load(task_file)
                if "langs" in data:
                    for lang, v in data["langs"].iteritems():
                        if len(v) == 0:
                            pass
                        elif len(v) == 1 and v[0] != official_team:
                            for team in v:
                                codes[(task, lang, team)] = "%s" % lang
                        else:
                            for team in v:
                                codes[(task, lang, team)] = \
                                    "%s_%s" % (lang, ioi_to_iso2[team])

    # Store the files as Statement objects.
    file_cacher = FileCacher()
    for task, lang, team in translations:
        if team == official_team:
            assert lang == "en"
            digest = file_cacher.put_file(
                path=os.path.join(data_dir, task, "by_lang",
                                  "%s (%s).pdf" % (lang, team)),
                description="Statement for task %s" % task)
        else:
            digest = file_cacher.put_file(
                path=os.path.join(data_dir, task, "by_lang",
                                  "%s (%s).pdf" % (lang, team)),
                description="Statement for task %s, translated into "
                            "%s (%s) by %s (%s)" %
                            (task, langs[lang], lang, teams[team], team))
        s = Statement(codes[(task, lang, team)], digest,
                      task=contest.get_task(task))
        session.add(s)

    session.commit()

    primary = dict()

    # Retrieve the statements selected by each team.
    for t in os.listdir(team_dir):
        if t.endswith('.json'):
            team = t[:-5]
            team_path = os.path.join(team_dir, t)
            with open(team_path) as team_file:
                data = json.load(team_file)
                for team2, lang, task in data.get("selected", []):
                    # A team could have selected a statement that
                    # later got removed.
                    if (task, lang, team2) in codes:
                        primary.setdefault(team, {}).setdefault(
                            task, []).append(codes[(task, lang, team2)])

    # Add the ones they uploaded themselves.
    for task, lang, team in translations:
        # Don't worry about duplicates, CWS filters them out.
        primary.setdefault(team, {}).setdefault(
            task, []).append(codes[(task, lang, team)])

    # Set the primary statements for tasks (i.e. the ones of the
    # official team).
    for task, primary2 in primary.get(official_team, {}).iteritems():
        contest.get_task(task).primary_statements = json.dumps(primary2)

    # Set the primary statements for teams.
    for team, primary2 in primary.iteritems():
        session.execute(
            "UPDATE users SET primary_statements = '%s' "
            "WHERE username LIKE '%s%%';" %
            (json.dumps(primary2), team))

    session.commit()

    print "Statements stored in the DB!"
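For reference, the data layout that run() expects can be sketched as below. The task, team and language names are invented for illustration; only the keys the script actually reads above ("teams", "langs", "tasks", "selected") are shown.

# Illustrative only: minimal examples of the files run() reads,
# inferred from the keys accessed above (all names are made up).
example_task_json = {          # contents of tasks/sum.json
    "teams": {"ITA": ["it"]},  # team -> languages it translated into
    "langs": {"it": ["ITA"]},  # language -> teams providing it
}
example_team_json = {          # contents of teams/ITA.json
    "tasks": {"sum": ["it"]},            # task -> languages
    "langs": {"it": ["sum"]},            # language -> tasks
    "selected": [["ITA", "it", "sum"]],  # (team, lang, task) picks
}
# The matching PDF would then live both at
# data/sum/by_lang/"it (ITA).pdf" and data/sum/by_team/"ITA (it).pdf".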