def _missing_operations(self):
    """Enqueue ranking operations that have not been sent yet.

    Scan all submissions of the contest and enqueue score/token
    operations for those that are evaluated/tokened but whose id is
    not yet in the *_sent_to_rankings sets.

    return (int): the number of operations enqueued.

    """
    counter = 0
    with SessionGen() as session:
        contest = Contest.get_from_id(self.contest_id, session)
        for submission in contest.get_submissions():
            if submission.user.hidden:
                continue
            # The submission result can be None if the dataset has
            # been just made live.
            sr = submission.get_result()
            if sr is None:
                continue
            # NOTE(review): this version gates on evaluated() while
            # sibling versions use scored() — confirm intended.
            if sr.evaluated() and \
                    submission.id not in self.scores_sent_to_rankings:
                for operation in self.operations_for_score(submission):
                    self.enqueue(operation)
                    counter += 1
            if submission.tokened() and \
                    submission.id not in self.tokens_sent_to_rankings:
                for operation in self.operations_for_token(submission):
                    self.enqueue(operation)
                    counter += 1
    return counter
def harvest_contest_data(contest_id):
    """Collect login credentials and the task list for a contest.

    contest_id (int): the id of the contest we want.

    return (tuple): a dict mapping usernames to their plaintext
        passwords, and the list of (id, name, statement languages)
        triples for the contest's tasks.

    """
    users = {}
    with SessionGen() as session:
        contest = Contest.get_from_id(contest_id, session)
        for participation in contest.participations:
            user = participation.user
            # A participation-level password overrides the user's one.
            source = participation.password
            if source is None:
                source = user.password
            # Only plaintext passwords allow us to actually log in.
            method, password = parse_authentication(source)
            if method != "plaintext":
                print("Not using user %s with non-plaintext password."
                      % user.username)
                continue
            users[user.username] = {'password': password}
        tasks = [(task.id, task.name, list(iterkeys(task.statements)))
                 for task in contest.tasks]
    return users, tasks
def search_jobs_not_done(self):
    """Sweep the database and search for work to do.

    Iterate over all submissions and look if they are in a suitable
    status to be sent (scored and not hidden) but, for some reason,
    haven't been sent yet (that is, their ID doesn't appear in the
    *_sent_to_rankings sets). In case, arrange for them to be sent.

    """
    logger.info("Going to search for unsent subchanges.")
    job_count = 0
    with SessionGen() as session:
        contest = Contest.get_from_id(self.contest_id, session)
        for submission in contest.get_submissions():
            if submission.user.hidden:
                continue
            # The result can be None if the dataset has just been
            # made live; guard against it before calling scored().
            sr = submission.get_result()
            if sr is not None and sr.scored() and \
                    submission.id not in self.scores_sent_to_rankings:
                self.send_score(submission)
                job_count += 1
            if submission.tokened() and \
                    submission.id not in self.tokens_sent_to_rankings:
                self.send_token(submission)
                job_count += 1
    # Lazy %-args: the message is formatted only if actually emitted.
    logger.info("Found %d unsent subchanges.", job_count)
def choose_contest(self):
    """Fill self.contest using contest passed as argument or path.

    If a contest was specified as argument to CWS, fill self.contest
    with that; otherwise extract it from the URL path.

    """
    if not self.is_multi_contest():
        # The contest was fixed on the command line.
        self.contest = Contest.get_from_id(
            self.service.contest_id, self.sql_session)
        return
    # Multi-contest mode: the first path argument names the contest
    # (see https://github.com/tornadoweb/tornado/issues/1673).
    contest_name = self.path_args[0]
    self.contest = self.sql_session.query(Contest) \
        .filter(Contest.name == contest_name).first()
    if self.contest is None:
        # Build a placeholder contest so a 404 page can be rendered:
        # render_params in this class assumes the contest is loaded,
        # but the base class version is enough for the error page.
        self.contest = Contest(name=contest_name,
                               description=contest_name)
        self.r_params = super().render_params()
        raise tornado.web.HTTPError(404)
def _missing_operations(self):
    """Enqueue ranking operations that have not been sent yet.

    Scan all submissions of the contest and enqueue score/token
    operations for those that are scored/tokened but whose id is not
    yet in the *_sent_to_rankings sets.

    return (int): the number of operations enqueued.

    """
    counter = 0
    with SessionGen() as session:
        contest = Contest.get_from_id(self.contest_id, session)
        for submission in contest.get_submissions():
            if submission.user.hidden:
                continue
            # The submission result can be None if the dataset has
            # been just made live; guard before calling scored().
            sr = submission.get_result()
            if sr is None:
                continue
            if sr.scored() and \
                    submission.id not in self.scores_sent_to_rankings:
                for operation in self.operations_for_score(submission):
                    self.enqueue(operation)
                    counter += 1
            if submission.tokened() and \
                    submission.id not in self.tokens_sent_to_rankings:
                for operation in self.operations_for_token(submission):
                    self.enqueue(operation)
                    counter += 1
    return counter
def choose_contest(self):
    """Fill self.contest using contest passed as argument or path.

    If a contest was specified as argument to CWS, fill self.contest
    with that; otherwise extract it from the URL path.

    """
    if not self.is_multi_contest():
        # Single-contest mode: use the id given on the command line.
        self.contest = Contest.get_from_id(self.service.contest_id,
                                           self.sql_session)
        return
    # Multi-contest mode: the contest name is the first path argument
    # (see https://github.com/tornadoweb/tornado/issues/1673).
    contest_name = self.path_args[0]
    self.contest = self.sql_session.query(Contest) \
        .filter(Contest.name == contest_name).first()
    if self.contest is None:
        # Use a dummy contest so a 404 page can be rendered:
        # render_params in this class assumes the contest is loaded,
        # but the base class version is enough for the error page.
        self.contest = Contest(name=contest_name,
                               description=contest_name)
        super().prepare()
        self.r_params = super().render_params()
        raise tornado.web.HTTPError(404)
def precache_files(self, contest_id):
    """RPC asking the worker to precache the files of a contest.

    contest_id (int): the id of the contest.

    """
    logger.info("Precaching files for contest %d.", contest_id)
    with SessionGen() as session:
        contest = Contest.get_from_id(contest_id, session)
        # Fetch the complete digest list first and only then download
        # the files, to avoid a long-living connection; possible race
        # conditions are harmless since this is just pre-caching.
        digests = enumerate_files(session, contest,
                                  skip_submissions=True,
                                  skip_user_tests=True,
                                  skip_print_jobs=True)
        for digest in digests:
            try:
                self.file_cacher.load(digest, if_needed=True)
            except KeyError:
                # A missing file is not a problem at this stage.
                pass
    logger.info("Precaching finished.")
def _missing_operations(self):
    """Enqueue ranking operations that have not been sent yet.

    Scan all submissions of the contest and enqueue score/token
    operations for those that are scored/tokened but whose id is not
    yet in the *_sent_to_rankings sets.

    return (int): the number of operations enqueued.

    """
    counter = 0
    with SessionGen() as session:
        contest = Contest.get_from_id(self.contest_id, session)
        for submission in contest.get_submissions():
            if submission.participation.hidden:
                continue
            # The submission result can be None if the dataset has
            # been just made live.
            sr = submission.get_result()
            if sr is None:
                continue
            if sr.scored() and \
                    submission.id not in self.scores_sent_to_rankings:
                for operation in self.operations_for_score(submission):
                    self.enqueue(operation)
                    counter += 1
            if submission.tokened() and \
                    submission.id not in self.tokens_sent_to_rankings:
                for operation in self.operations_for_token(submission):
                    self.enqueue(operation)
                    counter += 1
    return counter
def add_participations(contest_id, groupname):
    """Add every user to the given contest, in the given group.

    Users that already participate are left untouched (their group is
    not checked).

    return (bool): True on success.

    """
    with SessionGen() as session:
        users = session.query(User)
        contest = Contest.get_from_id(contest_id, session)
        if contest is None:
            logger.error("No contest with id `%s' found.", contest_id)
            return False
        # Resolve the target group: the named one, or the contest's
        # main group when no name was given.
        if groupname is None:
            group = contest.main_group
        else:
            group = session.query(Group) \
                .filter(Group.contest_id == contest_id,
                        Group.name == groupname).first()
            if group is None:
                logger.error("No group with name `%s' found.", groupname)
                return False
        for user in users:
            existing = session.query(Participation) \
                .filter(Participation.contest_id == contest_id,
                        Participation.user_id == user.id).first()
            if existing:
                logger.info(
                    "Participation already exists (left untouched; group not verified): '%s'",
                    user.username)
            else:
                session.add(Participation(user=user, contest=contest,
                                          group=group))
                logger.info("Participation added: '%s'", user.username)
        session.commit()
    return True
def __init__(self, contest_id):
    """Build the wrapper for the contest with the given id."""
    super(MyContest, self).__init__(_session)
    self.contest_id = contest_id
    # Load the contest row and mirror a few fields locally.
    contest = Contest.get_from_id(contest_id, self.sql_session)
    self.contest = contest
    self.name = contest.name
    self.description = contest.description
    # Child collections tied to this contest.
    self.questions = QuestionList(self.contest_id)
    self.announcements = AnnouncementList(self.contest_id)
def initialize(self):
    """Send basic data to all the rankings.

    It's data that's supposed to be sent before the contest, that's
    needed to understand what we're talking about when we send
    submissions: contest, users, tasks.

    No support for teams, flags and faces.

    """
    logger.info("Initializing rankings.")
    with SessionGen() as session:
        contest = Contest.get_from_id(self.contest_id, session)
        if contest is None:
            logger.error(
                "Received request for unexistent contest "
                "id %s.", self.contest_id)
            raise KeyError("Contest not found.")
        contest_id = encode_id(contest.name)
        contest_data = {
            "name": contest.description,
            "begin": int(make_timestamp(contest.start)),
            "end": int(make_timestamp(contest.stop)),
            "score_precision": contest.score_precision}
        # Only non-hidden users are published to the rankings.
        users = {
            encode_id(user.username): {"f_name": user.first_name,
                                       "l_name": user.last_name,
                                       "team": None}
            for user in contest.users if not user.hidden}
        tasks = {}
        for task in contest.tasks:
            score_type = get_score_type(dataset=task.active_dataset)
            tasks[encode_id(task.name)] = {
                "short_name": task.name,
                "name": task.title,
                "contest": encode_id(contest.name),
                "order": task.num,
                "max_score": score_type.max_score,
                "extra_headers": score_type.ranking_headers,
                "score_precision": task.score_precision,
                "score_mode": task.score_mode}
    self.enqueue(ProxyOperation(ProxyExecutor.CONTEST_TYPE,
                                {contest_id: contest_data}))
    self.enqueue(ProxyOperation(ProxyExecutor.USER_TYPE, users))
    self.enqueue(ProxyOperation(ProxyExecutor.TASK_TYPE, tasks))
def add_participation(username, contest_id, ip, delay_time, extra_time,
                      password, method, is_hashed, team_code, hidden,
                      unrestricted):
    """Create a participation of the given user in the given contest.

    return (bool): True on success, False if the user, contest or
        team is missing or the participation already exists.

    """
    logger.info("Creating the user's participation in the database.")
    delay_time = 0 if delay_time is None else delay_time
    extra_time = 0 if extra_time is None else extra_time
    if hidden:
        logger.warning("The participation will be hidden")
    if unrestricted:
        logger.warning("The participation will be unrestricted")
    try:
        with SessionGen() as session:
            user = session.query(User) \
                .filter(User.username == username).first()
            if user is None:
                logger.error("No user with username `%s' found.",
                             username)
                return False
            contest = Contest.get_from_id(contest_id, session)
            if contest is None:
                logger.error("No contest with id `%s' found.",
                             contest_id)
                return False
            team = None
            if team_code is not None:
                team = session.query(Team) \
                    .filter(Team.code == team_code).first()
                if team is None:
                    logger.error("No team with code `%s' found.",
                                 team_code)
                    return False
            if password is not None:
                # Either store the pre-hashed value or hash it now.
                if is_hashed:
                    password = build_password(password, method)
                else:
                    password = hash_password(password, method)
            session.add(Participation(
                user=user,
                contest=contest,
                ip=[ipaddress.ip_network(ip)] if ip is not None else None,
                delay_time=datetime.timedelta(seconds=delay_time),
                extra_time=datetime.timedelta(seconds=extra_time),
                password=password,
                team=team,
                hidden=hidden,
                unrestricted=unrestricted))
            session.commit()
    except IntegrityError:
        logger.error("A participation for this user in this contest "
                     "already exists.")
        return False
    logger.info("Participation added.")
    return True
def initialize(self):
    """Send basic data to all the rankings.

    It's data that's supposed to be sent before the contest, that's
    needed to understand what we're talking about when we send
    submissions: contest, users, tasks.

    No support for teams, flags and faces.

    """
    logger.info("Initializing rankings.")
    with SessionGen() as session:
        contest = Contest.get_from_id(self.contest_id, session)
        if contest is None:
            logger.error("Received request for unexistent contest "
                         "id %s.", self.contest_id)
            raise KeyError("Contest not found.")
        contest_id = encode_id(contest.name)
        contest_data = {
            "name": contest.description,
            "begin": int(make_timestamp(contest.start)),
            "end": int(make_timestamp(contest.stop)),
            "score_precision": contest.score_precision}
        # Publish only the users of non-hidden participations.
        users = {
            encode_id(p.user.username): {"f_name": p.user.first_name,
                                         "l_name": p.user.last_name,
                                         "team": None}
            for p in contest.participations if not p.hidden}
        tasks = {}
        for task in contest.tasks:
            score_type = get_score_type(dataset=task.active_dataset)
            tasks[encode_id(task.name)] = {
                "short_name": task.name,
                "name": task.title,
                "contest": encode_id(contest.name),
                "order": task.num,
                "max_score": score_type.max_score,
                "extra_headers": score_type.ranking_headers,
                "score_precision": task.score_precision,
                "score_mode": task.score_mode}
    self.enqueue(ProxyOperation(ProxyExecutor.CONTEST_TYPE,
                                {contest_id: contest_data}))
    self.enqueue(ProxyOperation(ProxyExecutor.USER_TYPE, users))
    self.enqueue(ProxyOperation(ProxyExecutor.TASK_TYPE, tasks))
def prepare(self):
    """Run at the beginning of each request.

    Loads the contest, the translation helper and the render
    parameters onto the handler.

    """
    super(BaseHandler, self).prepare()
    self.contest = Contest.get_from_id(
        self.application.service.contest, self.sql_session)
    self._ = self.locale.translate
    self.r_params = self.render_params()
def add_user(contest_id, first_name, last_name, username, password,
             ip_address, email, hidden):
    """Insert a new user of the given contest into the database.

    The session commits on exit (SessionGen(commit=True)).

    """
    with SessionGen(commit=True) as session:
        contest = Contest.get_from_id(contest_id, session)
        session.add(User(first_name=first_name,
                         last_name=last_name,
                         username=username,
                         password=password,
                         email=email,
                         ip=ip_address,
                         hidden=hidden,
                         contest=contest))
def add_user(contest_id, first_name, last_name, username, password,
             ip_address, email, hidden):
    """Insert a new user of the given contest into the database."""
    with SessionGen() as session:
        contest = Contest.get_from_id(contest_id, session)
        new_user = User(first_name=first_name,
                        last_name=last_name,
                        username=username,
                        password=password,
                        email=email,
                        ip=ip_address,
                        hidden=hidden,
                        contest=contest)
        session.add(new_user)
        # Persist the new row explicitly.
        session.commit()
def prepare(self):
    """Run at the beginning of each request.

    Records the timestamp, disables caching, opens the SQL session
    and loads contest, translator and render parameters.

    """
    self.timestamp = make_datetime()
    # Never let browsers serve stale pages.
    self.set_header("Cache-Control", "no-cache, must-revalidate")
    self.sql_session = Session()
    self.contest = Contest.get_from_id(self.application.service.contest,
                                       self.sql_session)
    self._ = self.locale.translate
    self.r_params = self.render_params()
def precache_files(self, contest_id):
    """RPC to ask the worker to precache the files of a contest.

    contest_id (int): the id of the contest.

    """
    # Lock is not needed if the admins correctly placed cache and
    # temp directories in the same filesystem. This is what
    # usually happens since they are children of the same,
    # cms-created, directory.
    # Use lazy %-args so the message is formatted only if emitted.
    logger.info("Precaching files for contest %d.", contest_id)
    with SessionGen() as session:
        contest = Contest.get_from_id(contest_id, session)
        for digest in contest.enumerate_files(skip_submissions=True,
                                              skip_user_tests=True):
            self.file_cacher.load(digest)
    logger.info("Precaching finished.")
def rankings_initialize(self):
    """Send to all the rankings all the data that are supposed to be
    sent before the contest: contest, users, tasks.

    No support for teams, flags and faces.

    """
    logger.info("Initializing rankings.")
    with SessionGen(commit=False) as session:
        contest = Contest.get_from_id(self.contest_id, session)
        if contest is None:
            logger.error("Received request for unexistent contest "
                         "id %s." % self.contest_id)
            raise KeyError
        contest_id = encode_id(contest.name)
        contest_data = {
            "name": contest.description,
            "begin": int(make_timestamp(contest.start)),
            "end": int(make_timestamp(contest.stop)),
            "score_precision": contest.score_precision}
        # Publish only non-hidden users.
        users = {}
        for user in contest.users:
            if user.hidden:
                continue
            users[encode_id(user.username)] = {
                "f_name": user.first_name,
                "l_name": user.last_name,
                "team": None}
        tasks = {}
        for task in contest.tasks:
            tasks[encode_id(task.name)] = {
                "name": task.title,
                "contest": encode_id(contest.name),
                "max_score": 100.0,
                "score_precision": task.score_precision,
                "extra_headers": [],
                "order": task.num,
                "short_name": task.name}
        for ranking in self.rankings:
            ranking.data_queue.put((ranking.CONTEST_TYPE,
                                    {contest_id: contest_data}))
            ranking.data_queue.put((ranking.USER_TYPE, users))
            ranking.data_queue.put((ranking.TASK_TYPE, tasks))
def contest_from_db(contest_id, session):
    """Return the contest object with the given id.

    contest_id (int|None): the id of the contest, or None to return
        None.
    session (Session): SQLAlchemy session to use.

    return (Contest|None): None if contest_id is None, or the contest.

    raise (ImportDataError): if there is no contest with the given id.

    """
    if contest_id is None:
        return None
    contest = Contest.get_from_id(contest_id, session)
    if contest is not None:
        return contest
    raise ImportDataError(
        "The specified contest (id %s) does not exist." % contest_id)
def contest_from_db(contest_id, session):
    """Return the contest object with the given id.

    contest_id (int|None): the id of the contest, or None to return
        None.
    session (Session): SQLAlchemy session to use.

    return (Contest|None): None if contest_id is None, or the contest.

    raise (ImportDataError): if there is no contest with the given id.

    """
    if contest_id is None:
        return None
    found = Contest.get_from_id(contest_id, session)
    if found is None:
        raise ImportDataError("The specified contest (id %s) does not exist." % contest_id)
    return found
def harvest_contest_data(contest_id):
    """Retrieve the couples username, password and the task list for a
    given contest.

    contest_id (int): the id of the contest we want.

    return (tuple): a dict mapping usernames to passwords, and the
        list of (id, name) pairs of the contest's tasks.

    """
    with SessionGen() as session:
        contest = Contest.get_from_id(contest_id, session)
        users = {user.username: {'password': user.password}
                 for user in contest.users}
        tasks = [(task.id, task.name) for task in contest.tasks]
    return users, tasks
def add_participation(username, contest_id, ip, delay_time, extra_time,
                      password, team_code, hidden):
    """Create a participation of the given user in the given contest.

    return (bool): True on success, False if the user, contest or
        team is missing or the participation already exists.

    """
    # Fixed log message: this function creates a participation, not
    # a user (consistent with the other add_participation variants).
    logger.info("Creating the user's participation in the database.")
    delay_time = delay_time if delay_time is not None else 0
    extra_time = extra_time if extra_time is not None else 0
    try:
        with SessionGen() as session:
            user = \
                session.query(User).filter(User.username == username).first()
            if user is None:
                logger.error("No user with username `%s' found.", username)
                return False
            contest = Contest.get_from_id(contest_id, session)
            if contest is None:
                logger.error("No contest with id `%s' found.", contest_id)
                return False
            team = None
            if team_code is not None:
                team = \
                    session.query(Team).filter(Team.code == team_code).first()
                if team is None:
                    logger.error("No team with code `%s' found.", team_code)
                    return False
            participation = Participation(
                user=user,
                contest=contest,
                ip=ip,
                delay_time=datetime.timedelta(seconds=delay_time),
                extra_time=datetime.timedelta(seconds=extra_time),
                password=password,
                team=team,
                hidden=hidden)
            session.add(participation)
            session.commit()
    except IntegrityError:
        logger.error("A participation for this user in this contest "
                     "already exists.")
        return False
    logger.info("Participation added.")
    return True
def search_jobs_not_done(self): """Look in the database for submissions that have not been scored for no good reasons. Put the missing job in the queue. """ # Do this only if we are not still loading old submission # (from the start of the service). if self.scoring_old_submission: return True with SessionGen(commit=False) as session: contest = Contest.get_from_id(self.contest_id, session) new_submission_results_to_score = set() for submission in contest.get_submissions(): if submission.user.hidden: continue for dataset in get_datasets_to_judge(submission.task): sr = submission.get_result(dataset) sr_id = (submission.id, dataset.id) if sr is not None and (sr.evaluated() or sr.compilation_outcome == "fail") and \ sr_id not in self.submission_results_scored: new_submission_results_to_score.add(sr_id) if submission.tokened() and \ submission.id not in self.tokens_sent_to_rankings: self.rankings_send_token(submission) new_s = len(new_submission_results_to_score) old_s = len(self.submission_results_to_score) logger.info("Submissions found to score: %d." % new_s) if new_s > 0: self.submission_results_to_score |= new_submission_results_to_score if old_s == 0: self.add_timeout(self.score_old_submissions, None, 0.5, immediately=False) # Run forever. return True
def generate_passwords(contest_id, exclude_hidden, exclude_unrestricted,
                       output_path):
    """Assign a fresh random plaintext password to every relevant user.

    If contest_id is given, updates the participations of that contest
    (optionally excluding hidden/unrestricted ones); otherwise updates
    all users. Writes a CSV of the new credentials to output_path.

    return (bool): True on success.

    """
    logger.info("Updating passwords...")
    # Renamed from `io` to avoid shadowing the stdlib `io` module.
    with open(output_path, 'w') as out:
        out.write("contest_id,team,fullname,username,password\n")
        with SessionGen() as session:
            if contest_id is not None:
                contest = Contest.get_from_id(contest_id, session)
                # Fail fast instead of crashing later at contest.id.
                if contest is None:
                    logger.error("No contest with id `%s' found.",
                                 contest_id)
                    return False
                objects = session.query(Participation).join(
                    Participation.user).join(Participation.team)
                # `== False` is the SQLAlchemy column-comparison idiom,
                # not a Python equality smell.
                if exclude_unrestricted:
                    objects = objects.filter(
                        Participation.unrestricted == False)
                if exclude_hidden:
                    objects = objects.filter(Participation.hidden == False)
            else:
                objects = session.query(User)
            for obj in objects:
                password = generate_random_password()
                obj.password = build_password(password, 'plaintext')
                user = obj if isinstance(obj, User) else obj.user
                fullname = "%s %s" % (user.first_name, user.last_name)
                if isinstance(obj, Participation):
                    team = obj.team.code if obj.team is not None else ''
                    logger.info(
                        "Updating participation of user %s (team=%s) on contest id %d",
                        user.username, team, contest.id)
                    out.write(
                        "%d,%s,%s,%s,%s\n"
                        % (contest.id, team, fullname, user.username,
                           password))
                else:
                    logger.info("Updating user %s", user.username)
                    out.write(",,%s,%s,%s\n"
                              % (fullname, user.username, password))
            session.commit()
    logger.info("Done.")
    return True
def do_export(self):
    """Run the actual export code.

    return (bool): True if the export succeeded.

    """
    logger.operation = "exporting contest %s" % self.contest_id
    logger.info("Starting export.")
    logger.info("Creating dir structure.")
    try:
        os.mkdir(self.spool_dir)
    except OSError:
        logger.critical("The specified directory already exists, "
                        "I won't overwrite it.")
        return False
    os.mkdir(self.upload_dir)
    with SessionGen() as session:
        self.contest = Contest.get_from_id(self.contest_id, session)
        # Non-hidden submissions, sorted by timestamp.
        visible = [s for s in self.contest.get_submissions()
                   if not s.participation.hidden]
        visible.sort(key=lambda s: s.timestamp)
        self.submissions = visible
        # One directory per visible participant.
        for participation in self.contest.participations:
            if participation.hidden:
                continue
            os.mkdir(os.path.join(
                self.upload_dir, participation.user.username))
        try:
            self.export_submissions()
            self.export_ranking()
        except Exception:
            logger.critical("Generic error.", exc_info=True)
            return False
    logger.info("Export finished.")
    logger.operation = ""
    return True
def do_export(self):
    """Run the actual export code.

    return (bool): True if the export succeeded.

    """
    logger.operation = "exporting contest %s" % self.contest_id
    logger.info("Starting export.")
    logger.info("Creating dir structure.")
    try:
        os.mkdir(self.spool_dir)
    except OSError:
        logger.critical("The specified directory already exists, "
                        "I won't overwrite it.")
        return False
    os.mkdir(self.upload_dir)
    with SessionGen() as session:
        self.contest = Contest.get_from_id(self.contest_id, session)
        self.submissions = sorted(
            (s for s in self.contest.get_submissions()
             if not s.participation.hidden),
            key=lambda s: s.timestamp)
        # Create the per-user upload directories (visible users only).
        for participation in self.contest.participations:
            if not participation.hidden:
                target = os.path.join(self.upload_dir,
                                      participation.user.username)
                os.mkdir(target)
        try:
            self.export_submissions()
            self.export_ranking()
        except Exception:
            logger.critical("Generic error.", exc_info=True)
            return False
    logger.info("Export finished.")
    logger.operation = ""
    return True
def __init__(self, contest_id, export_target, dump_files, dump_model,
             skip_generated, skip_submissions, skip_user_tests):
    """Store the export options and pick a default target name."""
    self.contest_id = contest_id
    self.dump_files = dump_files
    self.dump_model = dump_model
    self.skip_generated = skip_generated
    self.skip_submissions = skip_submissions
    self.skip_user_tests = skip_user_tests
    if export_target != "":
        self.export_target = export_target
    else:
        # No target given: derive one from the contest's name.
        with SessionGen() as session:
            contest = Contest.get_from_id(self.contest_id, session)
            self.export_target = "dump_%s.tar.gz" % contest.name
            logger.warning("export_target not given, using \"%s\""
                           % self.export_target)
    self.file_cacher = FileCacher()
def __init__(self, contest_id, export_target, dump_files, dump_model,
             light, skip_submissions, skip_user_tests):
    """Store the export options and pick a default target name."""
    self.contest_id = contest_id
    self.dump_files = dump_files
    self.dump_model = dump_model
    self.light = light
    self.skip_submissions = skip_submissions
    self.skip_user_tests = skip_user_tests
    if export_target != "":
        self.export_target = export_target
    else:
        # No target given: derive one from the contest's name.
        with SessionGen(commit=False) as session:
            contest = Contest.get_from_id(self.contest_id, session)
            self.export_target = "dump_%s.tar.gz" % contest.name
            logger.warning("export_target not given, using \"%s\""
                           % self.export_target)
    self.file_cacher = FileCacher()
def add_participation(username, contest_id):
    """Create a visible, restricted participation of the user in the
    contest.

    return (bool): True on success, False if the user or contest does
        not exist or the participation is already present.

    """
    try:
        with SessionGen() as session:
            user = session.query(User) \
                .filter(User.username == username).first()
            if user is None:
                return False
            contest = Contest.get_from_id(contest_id, session)
            if contest is None:
                return False
            session.add(Participation(user=user,
                                      contest=contest,
                                      hidden=False,
                                      unrestricted=False))
            session.commit()
    except IntegrityError:
        return False
    logger.info("Added participation for user {}".format(username))
    return True
def precache_files(self, contest_id):
    """RPC asking the worker to precache the files of a contest.

    contest_id (int): the id of the contest.

    """
    logger.info("Precaching files for contest %d.", contest_id)
    with SessionGen() as session:
        contest = Contest.get_from_id(contest_id, session)
        # Fetch the complete digest list first, then download; races
        # are harmless since this is just pre-caching.
        digests = contest.enumerate_files(skip_submissions=True,
                                          skip_user_tests=True)
        for digest in digests:
            try:
                self.file_cacher.load(digest, if_needed=True)
            except KeyError:
                # A missing file is not a problem at this stage.
                pass
    logger.info("Precaching finished.")
def choose_contest(self):
    """Fill self.contest using contest passed as argument or path.

    If a contest was specified as argument to CWS, fill self.contest
    with that; otherwise extract it from the URL path.

    """
    if not self.is_multi_contest():
        # Single-contest mode: use the id given on the command line.
        self.contest = Contest.get_from_id(
            self.application.service.contest_id, self.sql_session)
        return
    # Multi-contest mode: the contest name is the first path argument
    # (see https://github.com/tornadoweb/tornado/issues/1673).
    contest_name = self.path_args[0]
    if contest_name in self.contest_list:
        self.contest = self.contest_list[contest_name]
    else:
        # Unknown contest: render a 404 with a placeholder contest.
        self.contest = Contest(name=contest_name,
                               description=contest_name)
        self.r_params = self.render_params()
        raise tornado.web.HTTPError(404)
def __init__(self, contest_id, export_target, dump_files, dump_model,
             skip_generated, skip_submissions, skip_user_tests):
    """Store the export options and pick a default target name."""
    self.contest_id = contest_id
    self.dump_files = dump_files
    self.dump_model = dump_model
    self.skip_generated = skip_generated
    self.skip_submissions = skip_submissions
    self.skip_user_tests = skip_user_tests
    self.export_target = export_target
    # Without an explicit target, derive one from the contest's name;
    # an invalid contest id disables the export (contest_id = None).
    if export_target == "":
        with SessionGen() as session:
            contest = Contest.get_from_id(self.contest_id, session)
            if contest is None:
                logger.critical("Please specify a valid contest id.")
                self.contest_id = None
            else:
                self.export_target = "dump_%s.tar.gz" % contest.name
                logger.warning("export_target not given, using \"%s\""
                               % self.export_target)
    self.file_cacher = FileCacher()
def precache_files(self, contest_id):
    """RPC to ask the worker to precache the files of a contest.

    contest_id (int): the id of the contest.

    """
    lock = self.file_cacher.precache_lock()
    if lock is None:
        # Someone else holds the lock: that worker is precaching
        # already, so there is nothing for us to do.
        logger.info(
            "Another worker is already precaching files for "
            "contest %d.", contest_id)
        return
    with lock:
        logger.info("Precaching files for contest %d.", contest_id)
        with SessionGen() as session:
            contest = Contest.get_from_id(contest_id, session)
            # Fetch the complete digest list first and then download,
            # to avoid a long-living connection; races are harmless
            # since this is just pre-caching.
            digests = enumerate_files(session, contest,
                                      skip_submissions=True,
                                      skip_user_tests=True,
                                      skip_print_jobs=True)
            for digest in digests:
                try:
                    self.file_cacher.cache_file(digest)
                except KeyError:
                    # A missing file is not a problem at this stage.
                    pass
        logger.info("Precaching finished.")
def do_export(self):
    """Run the actual export code.

    return (bool): True if the export succeeded.

    """
    logger.info("Starting export.")
    export_dir = self.export_target
    archive_info = get_archive_info(self.export_target)
    if archive_info["write_mode"] != "":
        # The target is an archive we can write: build the dump in a
        # temporary directory and pack it at the end.
        if os.path.exists(self.export_target):
            logger.critical("The specified file already exists, "
                            "I won't overwrite it.")
            return False
        export_dir = os.path.join(tempfile.mkdtemp(),
                                  archive_info["basename"])
    logger.info("Creating dir structure.")
    try:
        os.mkdir(export_dir)
    except OSError:
        logger.critical("The specified directory already exists, "
                        "I won't overwrite it.")
        return False
    files_dir = os.path.join(export_dir, "files")
    descr_dir = os.path.join(export_dir, "descriptions")
    os.mkdir(files_dir)
    os.mkdir(descr_dir)
    with SessionGen() as session:
        # Export files.
        logger.info("Exporting files.")
        if self.dump_files:
            for contest_id in self.contests_ids:
                contest = Contest.get_from_id(contest_id, session)
                files = contest.enumerate_files(self.skip_submissions,
                                                self.skip_user_tests,
                                                self.skip_generated)
                for file_ in files:
                    ok = self.safe_get_file(
                        file_,
                        os.path.join(files_dir, file_),
                        os.path.join(descr_dir, file_))
                    if not ok:
                        return False
        # Export data in JSON format.
        if self.dump_model:
            logger.info("Exporting data to a JSON file.")
            # We use strings because they'll be the keys of a JSON
            # object
            self.ids = {}
            self.queue = []
            data = dict()
            for cls, lst in [(Contest, self.contests_ids),
                             (User, self.users_ids),
                             (Task, self.tasks_ids)]:
                for i in lst:
                    obj = cls.get_from_id(i, session)
                    self.get_id(obj)
            # Specify the "root" of the data graph
            data["_objects"] = self.ids.values()
            while len(self.queue) > 0:
                obj = self.queue.pop(0)
                data[self.ids[obj.sa_identity_key]] = \
                    self.export_object(obj)
            data["_version"] = model_version
            with io.open(os.path.join(export_dir, "contest.json"),
                         "wb") as fout:
                json.dump(data, fout, encoding="utf-8",
                          indent=4, sort_keys=True)
    # If the admin requested export to file, we do that.
    if archive_info["write_mode"] != "":
        archive = tarfile.open(self.export_target,
                               archive_info["write_mode"])
        archive.add(export_dir, arcname=archive_info["basename"])
        archive.close()
        rmtree(export_dir)
    logger.info("Export finished.")
    return True
def do_export(self):
    """Run the actual export code.

    Dump the requested files and/or the serialized model of the
    selected contests, users and tasks into self.export_target
    (either a plain directory or an archive, depending on whether
    the target's extension has a write mode).

    return (bool): True on success, False otherwise.

    """
    logger.info("Starting export.")

    archive_info = get_archive_info(self.export_target)
    to_archive = archive_info["write_mode"] != ""

    if to_archive:
        # We are able to write to this archive.
        if os.path.exists(self.export_target):
            logger.critical("The specified file already exists, "
                            "I won't overwrite it.")
            return False
        # Stage the tree in a temporary directory; it will be packed
        # into the archive at the end.
        export_dir = os.path.join(tempfile.mkdtemp(),
                                  archive_info["basename"])
    else:
        export_dir = self.export_target

    logger.info("Creating dir structure.")
    try:
        os.mkdir(export_dir)
    except OSError:
        logger.critical("The specified directory already exists, "
                        "I won't overwrite it.")
        return False

    files_dir = os.path.join(export_dir, "files")
    descr_dir = os.path.join(export_dir, "descriptions")
    os.mkdir(files_dir)
    os.mkdir(descr_dir)

    with SessionGen() as session:
        # Export files.
        logger.info("Exporting files.")
        if self.dump_files:
            for contest_id in self.contests_ids:
                contest = Contest.get_from_id(contest_id, session)
                digests = enumerate_files(
                    session, contest,
                    skip_submissions=self.skip_submissions,
                    skip_user_tests=self.skip_user_tests,
                    skip_users=self.skip_users,
                    skip_print_jobs=self.skip_print_jobs,
                    skip_generated=self.skip_generated)
                for digest in digests:
                    ok = self.safe_get_file(
                        digest,
                        os.path.join(files_dir, digest),
                        os.path.join(descr_dir, digest))
                    if not ok:
                        return False

        # Export data in JSON format.
        if self.dump_model:
            logger.info("Exporting data to a JSON file.")

            # We use strings because they'll be the keys of a JSON
            # object
            self.ids = {}
            self.queue = []

            for cls, id_list in [(Contest, self.contests_ids),
                                 (User, self.users_ids),
                                 (Task, self.tasks_ids)]:
                for obj_id in id_list:
                    self.get_id(cls.get_from_id(obj_id, session))

            # Specify the "root" of the data graph
            data = {"_objects": list(self.ids.values())}

            while self.queue:
                obj = self.queue.pop(0)
                data[self.ids[obj.sa_identity_key]] = \
                    self.export_object(obj)

            data["_version"] = model_version

            destination = os.path.join(export_dir, "contest.json")
            with open(destination, "wt", encoding="utf-8") as fout:
                json.dump(data, fout, indent=4, sort_keys=True)

    # If the admin requested export to file, we do that.
    if to_archive:
        with tarfile.open(self.export_target,
                          archive_info["write_mode"]) as archive:
            archive.add(export_dir, arcname=archive_info["basename"])
        rmtree(export_dir)

    logger.info("Export finished.")
    return True
def initialize(self):
    """Send basic data to all the rankings.

    It's data that's supposed to be sent before the contest, that's
    needed to understand what we're talking about when we send
    submissions: contest, users, tasks.

    No support for teams, flags and faces.

    """
    logger.info("Initializing rankings.")

    with SessionGen() as session:
        contest = Contest.get_from_id(self.contest_id, session)

        if contest is None:
            logger.warning(
                "Received request for unexistent contest "
                "id %s.", self.contest_id)
            # proxy service needs be reconfigured with an existing contest
            # to continue to function properly.
            logger.warning(
                "Attempting to recover by binding to a existing "
                "contest")
            # BUGFIX: get_contest_list() was queried twice and the
            # first result immediately overwritten; query it once.
            contests = get_contest_list(session)
            if len(contests) <= 0:
                logger.error(
                    "No existing contests to bind to and recover.")
                # NOTE(review): exiting with status 0 on an error path
                # looks suspicious -- confirm whether a non-zero exit
                # code was intended before changing it.
                sys.exit(0)
            # Bind to the last contest returned by the listing.
            contest = contests[-1]
            self.contest_id = contest.id
            logger.warning(
                "Recovered by binding to a existing contest "
                "with id %s.", contest.id)

        contest_id = encode_id(contest.name)
        contest_data = {
            "name": contest.description,
            "begin": int(make_timestamp(contest.start)),
            "end": int(make_timestamp(contest.stop)),
            "score_precision": contest.score_precision}

        users = dict()
        teams = dict()

        for participation in contest.participations:
            user = participation.user
            team = participation.team
            # Hidden participations are not sent to the rankings.
            if not participation.hidden:
                users[encode_id(user.username)] = {
                    "f_name": user.first_name,
                    "l_name": user.last_name,
                    "team": team.code if team is not None else None,
                }
                if team is not None:
                    teams[encode_id(team.code)] = {"name": team.name}

        tasks = dict()

        for task in contest.tasks:
            score_type = task.active_dataset.score_type_object
            tasks[encode_id(task.name)] = {
                "short_name": task.name,
                "name": task.title,
                "contest": encode_id(contest.name),
                "order": task.num,
                "max_score": score_type.max_score,
                "extra_headers": score_type.ranking_headers,
                "score_precision": task.score_precision,
                "score_mode": task.score_mode,
            }

        self.enqueue(ProxyOperation(ProxyExecutor.CONTEST_TYPE,
                                    {contest_id: contest_data}))
        self.enqueue(ProxyOperation(ProxyExecutor.TEAM_TYPE, teams))
        self.enqueue(ProxyOperation(ProxyExecutor.USER_TYPE, users))
        self.enqueue(ProxyOperation(ProxyExecutor.TASK_TYPE, tasks))
def do_export(self):
    """Run the actual export code.

    Dump the files and/or the JSON model of the single contest
    self.contest_id into self.export_target (a directory, or an
    archive if the target's extension has a write mode).

    return (bool): True if the export succeeded, False otherwise.

    """
    logger.info("Starting export.")

    export_dir = self.export_target
    archive_info = get_archive_info(self.export_target)

    if archive_info["write_mode"] != "":
        # We are able to write to this archive.
        if os.path.exists(self.export_target):
            logger.critical("The specified file already exists, "
                            "I won't overwrite it.")
            return False
        export_dir = os.path.join(tempfile.mkdtemp(),
                                  archive_info["basename"])

    logger.info("Creating dir structure.")
    try:
        os.mkdir(export_dir)
    except OSError:
        logger.critical("The specified directory already exists, "
                        "I won't overwrite it.")
        return False

    files_dir = os.path.join(export_dir, "files")
    descr_dir = os.path.join(export_dir, "descriptions")
    os.mkdir(files_dir)
    os.mkdir(descr_dir)

    with SessionGen() as session:
        contest = Contest.get_from_id(self.contest_id, session)

        # Export files.
        if self.dump_files:
            logger.info("Exporting files.")
            files = contest.enumerate_files(self.skip_submissions,
                                            self.skip_user_tests,
                                            self.skip_generated)
            for file_ in files:
                if not self.safe_get_file(
                        file_,
                        os.path.join(files_dir, file_),
                        os.path.join(descr_dir, file_)):
                    return False

        # Export the contest in JSON format.
        if self.dump_model:
            logger.info("Exporting the contest to a JSON file.")

            # We use strings because they'll be the keys of a JSON
            # object; the contest will have ID 0.
            self.ids = {contest.sa_identity_key: "0"}
            self.queue = [contest]

            data = dict()
            while len(self.queue) > 0:
                obj = self.queue.pop(0)
                data[self.ids[obj.sa_identity_key]] = \
                    self.export_object(obj)

            # Specify the "root" of the data graph
            data["_objects"] = ["0"]

            data["_version"] = model_version

            # BUGFIX: on Python 3 json.dump() has no "encoding"
            # argument and needs a text-mode file; open the target in
            # text mode with an explicit UTF-8 encoding instead of
            # "wb".
            destination = os.path.join(export_dir, "contest.json")
            with open(destination, "wt", encoding="utf-8") as fout:
                json.dump(data, fout, indent=4, sort_keys=True)

    # If the admin requested export to file, we do that.
    if archive_info["write_mode"] != "":
        # Context manager guarantees the archive is closed even if
        # add() raises.
        with tarfile.open(self.export_target,
                          archive_info["write_mode"]) as archive:
            archive.add(export_dir, arcname=archive_info["basename"])
        rmtree(export_dir)

    logger.info("Export finished.")
    return True
def do_reimport(self):
    """Get the contest from the Loader and merge it.

    Compare the old contest (from the database) with the new one
    (from the loader) and create, update or delete users and tasks
    accordingly; deletions require self.force.

    return (bool): True on success, False otherwise.

    """
    with SessionGen() as session:
        # Load the old contest from the database.
        old_contest = Contest.get_from_id(self.old_contest_id, session)
        old_users = dict((x.username, x) for x in old_contest.users)
        old_tasks = dict((x.name, x) for x in old_contest.tasks)

        # Load the new contest from the filesystem.
        new_contest, new_tasks, new_users = self.loader.get_contest()

        # Updates contest-global settings that are set in new_contest.
        self._update_columns(old_contest, new_contest)

        # Do the actual merge: compare all users of the old and of
        # the new contest and see if we need to create, update or
        # delete them. Delete only if authorized, fail otherwise.
        # NOTE: logger calls use lazy %-style arguments (not eager
        # string interpolation), consistent with the rest of the file.
        users = set(old_users.keys()) | set(new_users)
        for username in users:
            old_user = old_users.get(username, None)

            if old_user is None:
                # Create a new user.
                logger.info("Creating user %s", username)
                new_user = self.loader.get_user(username)
                old_contest.users.append(new_user)
            elif username in new_users:
                # Update an existing user.
                logger.info("Updating user %s", username)
                new_user = self.loader.get_user(username)
                self._update_object(old_user, new_user)
            else:
                # Delete an existing user.
                if self.force:
                    logger.info("Deleting user %s", username)
                    old_contest.users.remove(old_user)
                else:
                    logger.critical(
                        "User %s exists in old contest, but "
                        "not in the new one. Use -f to force.", username)
                    return False

        # The same for tasks. Setting num for tasks requires a bit
        # of trickery, since we have to avoid triggering a
        # duplicate key constraint violation while we're messing
        # with the task order. To do that we just set sufficiently
        # high number on the first pass and then fix it on a
        # second pass.
        tasks = set(old_tasks.keys()) | set(new_tasks)
        current_num = max(len(old_tasks), len(new_tasks))
        for task in tasks:
            old_task = old_tasks.get(task, None)

            if old_task is None:
                # Create a new task.
                logger.info("Creating task %s", task)
                new_task = self.loader.get_task(task)
                new_task.num = current_num
                current_num += 1
                old_contest.tasks.append(new_task)
            elif task in new_tasks:
                # Update an existing task.
                if self.full or self.loader.has_changed(task):
                    logger.info("Updating task %s", task)
                    new_task = self.loader.get_task(task)
                    new_task.num = current_num
                    current_num += 1
                    self._update_object(old_task, new_task)
                else:
                    logger.info("Task %s has not changed", task)
                    # Even unchanged tasks should use a temporary number
                    # to avoid duplicate numbers when we fix them.
                    old_task.num = current_num
                    current_num += 1
            else:
                # Delete an existing task.
                if self.force:
                    logger.info("Deleting task %s", task)
                    session.delete(old_task)
                else:
                    logger.critical(
                        "Task %s exists in old contest, but "
                        "not in the new one. Use -f to force.", task)
                    return False

        session.flush()

        # And finally we fix the numbers; old_contest must be
        # refreshed because otherwise SQLAlchemy doesn't get aware
        # that some tasks may have been deleted
        tasks_order = dict((name, num)
                           for num, name in enumerate(new_tasks))
        session.refresh(old_contest)
        for task in old_contest.tasks:
            task.num = tasks_order[task.name]

        session.commit()

    logger.info("Reimport finished (contest id: %s).",
                self.old_contest_id)
    return True
def do_reimport(self):
    """Get the contest from the Loader and merge it.

    Diff the database contest against the loader's view of it, then
    create, update or delete users and tasks as needed; deletions
    happen only when self.force is set.

    return (bool): True on success, False otherwise.

    """
    with SessionGen() as session:
        # Load the old contest from the database.
        old_contest = Contest.get_from_id(self.old_contest_id, session)
        old_users = {u.username: u for u in old_contest.users}
        old_tasks = {t.name: t for t in old_contest.tasks}

        # Load the new contest from the filesystem.
        new_contest, new_tasks, new_users = self.loader.get_contest()

        # Updates contest-global settings that are set in new_contest.
        self._update_columns(old_contest, new_contest)

        # Do the actual merge: compare all users of the old and of
        # the new contest and see if we need to create, update or
        # delete them. Delete only if authorized, fail otherwise.
        all_usernames = set(old_users.keys()) | set(new_users)
        for username in all_usernames:
            old_user = old_users.get(username)

            if old_user is None:
                # Create a new user.
                logger.info("Creating user %s", username)
                old_contest.users.append(self.loader.get_user(username))
            elif username in new_users:
                # Update an existing user.
                logger.info("Updating user %s", username)
                self._update_object(old_user,
                                    self.loader.get_user(username))
            elif self.force:
                # Delete an existing user.
                logger.info("Deleting user %s", username)
                old_contest.users.remove(old_user)
            else:
                logger.critical(
                    "User %s exists in old contest, but "
                    "not in the new one. Use -f to force.",
                    username)
                return False

        # The same for tasks. Setting num for tasks requires a bit
        # of trickery, since we have to avoid triggering a
        # duplicate key constraint violation while we're messing
        # with the task order. To do that we just set sufficiently
        # high number on the first pass and then fix it on a
        # second pass.
        all_task_names = set(old_tasks.keys()) | set(new_tasks)
        next_num = max(len(old_tasks), len(new_tasks))
        for task_name in all_task_names:
            old_task = old_tasks.get(task_name)

            if old_task is None:
                # Create a new task.
                logger.info("Creating task %s", task_name)
                new_task = self.loader.get_task(task_name)
                new_task.num = next_num
                next_num += 1
                old_contest.tasks.append(new_task)
            elif task_name in new_tasks:
                if self.full or self.loader.has_changed(task_name):
                    # Update an existing task.
                    logger.info("Updating task %s", task_name)
                    new_task = self.loader.get_task(task_name)
                    new_task.num = next_num
                    next_num += 1
                    self._update_object(old_task, new_task)
                else:
                    logger.info("Task %s has not changed", task_name)
                    # Even unchanged tasks should use a temporary number
                    # to avoid duplicate numbers when we fix them.
                    old_task.num = next_num
                    next_num += 1
            elif self.force:
                # Delete an existing task.
                logger.info("Deleting task %s", task_name)
                session.delete(old_task)
            else:
                logger.critical(
                    "Task %s exists in old contest, but "
                    "not in the new one. Use -f to force.",
                    task_name)
                return False

        session.flush()

        # And finally we fix the numbers; old_contest must be
        # refreshed because otherwise SQLAlchemy doesn't get aware
        # that some tasks may have been deleted
        tasks_order = {name: num for num, name in enumerate(new_tasks)}
        session.refresh(old_contest)
        for task in old_contest.tasks:
            task.num = tasks_order[task.name]

        session.commit()

    logger.info("Reimport finished (contest id: %s).",
                self.old_contest_id)
    return True
def load_participations(path):
    """Load (or update) participations described in a JSON file.

    For every entry in the file's "participations" list, find the
    referenced contest, create or update the user, create or update
    the participation (password, ip, delay/extra time, hidden,
    unrestricted) and optionally attach a team.

    path (str): path of the JSON file to read.

    return (bool): True on success, False if a referenced contest is
        missing (the session is rolled back in that case).

    """
    logger.info("Loading...")
    # BUGFIX: the file handle used to be named "io", shadowing the
    # io module; JSON files are UTF-8 by specification, so read with
    # an explicit encoding rather than the locale default.
    with open(path, 'r', encoding='utf-8') as fobj:
        data = json.load(fobj)
    participations = data['participations']

    with SessionGen() as session:
        for entry in participations:
            logger.info('Loading: %s', entry)
            contest = Contest.get_from_id(entry['contest_id'], session)
            if contest is None:
                logger.error(" Contest ID %d not found",
                             entry['contest_id'])
                session.rollback()
                return False

            userdata = entry['user']
            user = session.query(User).filter(
                User.username == userdata['username']).first()
            if user is None:
                user = User(username=userdata['username'],
                            first_name=userdata['first_name'],
                            last_name=userdata['last_name'],
                            password=build_password(
                                generate_random_password()))
                logger.info(' Creating new user: %s', user.username)
                session.add(user)
            else:
                logger.info(' Using existing user: %s (id=%d)',
                            user.username, user.id)

            # Optional per-user overrides.
            if 'plaintext_password' in userdata:
                logger.info(' * password')
                user.password = build_password(
                    userdata['plaintext_password'], 'plaintext')
            if 'first_name' in userdata:
                logger.info(' * first_name: %s', userdata['first_name'])
                user.first_name = userdata['first_name']
            if 'last_name' in userdata:
                logger.info(' * last_name: %s', userdata['last_name'])
                user.last_name = userdata['last_name']

            participation = session.query(Participation).join(
                Participation.user).filter(
                    Participation.contest == contest).filter(
                        User.username == user.username).first()
            if participation is None:
                participation = Participation(user=user, contest=contest)
                logger.info(
                    ' Creating new participation for contest_id=%d user=%s',
                    contest.id, user.username)
                session.add(participation)
            else:
                logger.info(
                    ' Updating participation: id=%d contest_id=%d user=%s',
                    participation.id, participation.contest_id,
                    participation.user.username)

            # Optional per-participation overrides.
            if 'plaintext_password' in entry:
                logger.info(' * plaintext_password')
                participation.password = build_password(
                    entry['plaintext_password'], 'plaintext')
            if 'ip' in entry:
                logger.info(' * ip: %s', entry['ip'])
                participation.ip = [ipaddress.ip_network(entry['ip'])]
            if 'delay_time' in entry:
                logger.info(' * delay_time: %d', entry['delay_time'])
                participation.delay_time = datetime.timedelta(
                    seconds=entry['delay_time'])
            if 'extra_time' in entry:
                logger.info(' * extra_time: %d', entry['extra_time'])
                participation.extra_time = datetime.timedelta(
                    seconds=entry['extra_time'])
            if 'hidden' in entry:
                logger.info(' * hidden: %s', entry['hidden'])
                participation.hidden = entry['hidden']
            if 'unrestricted' in entry:
                logger.info(' * unrestricted: %s', entry['unrestricted'])
                participation.unrestricted = entry['unrestricted']

            if 'team' in userdata:
                team = session.query(Team).filter(
                    Team.code == userdata['team']['code']).first()
                if team is None:
                    team = Team(code=userdata['team']['code'],
                                name=userdata['team']['name'])
                    logger.info(' Creating new team: %s', team.code)
                    session.add(team)
                else:
                    logger.info(' Using existing team: %s', team.code)
                    if 'name' in userdata['team']:
                        logger.info(' * name: %s',
                                    userdata['team']['name'])
                        team.name = userdata['team']['name']
                participation.team = team

        session.commit()

    logger.info("Done.")
    return True
def do_export(self):
    """Run the actual export code.

    Collect username/password/name for every participation of the
    contest and write them to self.export_target, either as JSON
    (if self.json) or as a standalone HTML table (French labels).

    return (bool): True if the export succeeded.

    """
    logger.info("Starting export.")

    # Export users
    users = []
    with SessionGen() as session:
        # Get the contest
        contest = Contest.get_from_id(self.contest_id, session)
        if contest is None:
            logger.critical("Contest %d not found in database.",
                            self.contest_id)
            # BUGFIX: use sys.exit (the site-provided exit() builtin
            # is not guaranteed to be available in all contexts).
            sys.exit(1)

        # Get participations of the contest
        participations = contest.participations
        for p in participations:
            users.append({
                'username': p.user.username,
                'password': p.user.password,
                'first_name': p.user.first_name,
                'last_name': p.user.last_name
            })

    if self.json:
        j = {'users': users}
        # BUGFIX: on Python 3 json.dump() has no "encoding" argument
        # and needs a text-mode file; open the target in text mode
        # with an explicit UTF-8 encoding instead of "wb".
        with open(self.export_target, "w", encoding="utf-8") as fout:
            json.dump(j, fout, indent=2, sort_keys=True)
    else:
        html = """
        <table>
            <tr>
                <th>Prénom</th>
                <th>Nom</th>
                <th>Pseudo</th>
                <th>Mot de passe</th>
            </tr>
        """
        for u in users:
            html += """
            <tr>
                <td>{first_name}</td>
                <td>{last_name}</td>
                <td>{username}</td>
                <td>{password}</td>
            </tr>
            """.format(
                first_name=u['first_name'],
                last_name=u['last_name'],
                username=u['username'],
                password=u['password'],
            )
        html += "</table>"

        # Doubled braces in the CSS survive the outer .format() call.
        html = """
        <!DOCTYPE html>
        <html lang="fr">
        <head>
            <meta charset="utf-8">
            <title>Export des utilisateurs</title>
            <style>
                table {{
                    font-family: arial, sans-serif;
                    border-collapse: separate;
                    border-spacing: 0 1em;
                    width: 100%;
                }}
                td, th {{
                    border-top: 1px solid #dddddd;
                    text-align: left;
                    padding: 8px;
                }}
            </style>
        </head>
        <body>
        {}
        </body>
        </html>
        """.format(html)

        with open(self.export_target, "wb") as fout:
            fout.write(html.encode('utf8'))

    logger.info("Export finished.")
    return True