def __init__(self, shard, contest):
    parameters = {
        "login_url": "/",
        "template_path": pkg_resources.resource_filename(
            "cms.server.contest", "templates"),
        "static_files": [("cms.server", "static"),
                         ("cms.server.contest", "static")],
        "cookie_secret": base64.b64encode(config.secret_key),
        "debug": config.tornado_debug,
        "is_proxy_used": config.is_proxy_used,
    }

    try:
        listen_address = config.contest_listen_address[shard]
        listen_port = config.contest_listen_port[shard]
    except IndexError:
        raise ConfigError("Wrong shard number for %s, or missing "
                          "address/port configuration. Please check "
                          "contest_listen_address and contest_listen_port "
                          "in cms.conf." % __name__)

    super(ContestWebServer, self).__init__(
        listen_port, HANDLERS, parameters,
        shard=shard, listen_address=listen_address)

    self.contest = contest

    # This is a dictionary (indexed by username) of pending
    # notifications: things like "Yay, your submission went through.",
    # not things like "Your question has been answered", which are
    # handled by the database. Each username points to a list of
    # tuples (timestamp, subject, text).
    self.notifications = {}

    # Retrieve the available translations.
    self.langs = {lang_code: wrap_translations_for_tornado(trans)
                  for lang_code, trans in get_translations().iteritems()}

    self.file_cacher = FileCacher(self)

    self.evaluation_service = self.connect_to(
        ServiceCoord("EvaluationService", 0))
    self.scoring_service = self.connect_to(
        ServiceCoord("ScoringService", 0))

    ranking_enabled = len(config.rankings) > 0
    self.proxy_service = self.connect_to(
        ServiceCoord("ProxyService", 0),
        must_be_present=ranking_enabled)

    printing_enabled = config.printer is not None
    self.printing_service = self.connect_to(
        ServiceCoord("PrintingService", 0),
        must_be_present=printing_enabled)
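# A hedged sketch of how a service like this is typically launched in
# CMS; the import paths and helper names below are assumptions about
# the surrounding codebase, not verified API.
from cms import default_argument_parser       # assumed import
from cms.server import ask_for_contest        # assumed import


def main():
    """Parse arguments and launch the contest web server."""
    default_argument_parser("Contest web server for CMS.",
                            ContestWebServer,
                            ask_contest=ask_for_contest).run()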
def __init__(self, contest_ids, export_target, dump_files, dump_model,
             skip_generated, skip_submissions, skip_user_tests,
             skip_print_jobs):
    if contest_ids is None:
        with SessionGen() as session:
            contests = session.query(Contest).all()
            self.contests_ids = [contest.id for contest in contests]
            users = session.query(User).all()
            self.users_ids = [user.id for user in users]
            tasks = session.query(Task)\
                .filter(Task.contest_id.is_(None)).all()
            self.tasks_ids = [task.id for task in tasks]
    else:
        # FIXME: this is ATM broken, because if you export a contest,
        # you then export the users who participated in it, and then
        # all of the contests those users participated in.
        self.contests_ids = contest_ids
        self.users_ids = []
        self.tasks_ids = []
    self.dump_files = dump_files
    self.dump_model = dump_model
    self.skip_generated = skip_generated
    self.skip_submissions = skip_submissions
    self.skip_user_tests = skip_user_tests
    self.skip_print_jobs = skip_print_jobs
    self.export_target = export_target

    # If target is not provided, we use the current date.
    if len(export_target) == 0:
        self.export_target = "dump_%s.tar.gz" % date.today().isoformat()
        logger.warning("export_target not given, using \"%s\"",
                       self.export_target)

    self.file_cacher = FileCacher()
def __init__(self, rulesdir, source, output, party, wdir,
             extra=["-lualatex=lualatex --interaction=nonstopmode "
                    "--shell-restricted --nosocket %O %S"],
             ignore=set(), ignore_ext=set(), do_copy=set()):
    super(SafeLaTeXRule, self).__init__(rulesdir)
    self.source = source
    self.output = output
    self.party = party
    self.wdir = wdir
    self.file_cacher = FileCacher()
    self.extra = extra
    self.command = ["/usr/bin/latexmk", "-g", "-pdflua",
                    "-deps", "-deps-out=.deps"] + self.extra + [source]
    # The mutable default arguments are defensively copied so that
    # instances do not share (and mutate) the same set objects.
    self.ignore = copy(ignore)
    self.ignore_ext = copy(ignore_ext)
    self.do_copy = copy(do_copy)
def clean_files(session, dry_run):
    filecacher = FileCacher()
    files = set(file[0] for file in filecacher.list())
    logger.info("A total number of %d files are present in the file store",
                len(files))
    for cls in [Attachment, Executable, File, Manager, PrintJob,
                Statement, Testcase, UserTest, UserTestExecutable,
                UserTestFile, UserTestManager, UserTestResult]:
        for col in ["input", "output", "digest"]:
            if hasattr(cls, col):
                found_digests = set()
                digests = session.query(cls).all()
                digests = [getattr(obj, col) for obj in digests]
                found_digests |= set(digests)
                found_digests.discard(FileCacher.TOMBSTONE_DIGEST)
                logger.info("Found %d digests while scanning %s.%s",
                            len(found_digests), cls.__name__, col)
                files -= found_digests
    logger.info("%d digests are orphan.", len(files))
    total_size = 0
    for orphan in files:
        total_size += filecacher.get_size(orphan)
    logger.info("Orphan files take %s bytes of disk space",
                "{:,}".format(total_size))
    if not dry_run:
        for count, orphan in enumerate(files):
            filecacher.delete(orphan)
            if count % 100 == 0:
                logger.info("%d files deleted from the file store", count)
        logger.info("All orphan files have been deleted")
def build(self):
    file_cacher = FileCacher(path=os.path.join(self.wdir, ".cache"))

    with chdir(self.wdir):
        contestconfig = ContestConfig(
            os.path.join(self.wdir, ".rules"),
            "hidden contest",
            relevant_language=(self.language
                               if self.language != "ALL" else None),
            ignore_latex=self.no_latex,
            minimal=self.minimal)
        copyifnecessary(
            os.path.join(contestconfig._get_ready_dir(),
                         "contest-template.py"),
            os.path.join(self.wdir, "c.py"))
        contestconfig._readconfig("c.py")
        contestconfig._task(self.task, contestconfig.full_feedback, None,
                            self.minimal, standalone_task=True)

        if not self.minimal:
            cdb = contestconfig._makecontest()
            test_udb = contestconfig._makeuser(
                contestconfig._mytestuser.username)
            test_gdb = contestconfig._makegroup(
                contestconfig._mytestuser.group.name, cdb)
            # We're not putting the test user on any team for testing
            # (shouldn't be needed).
            test_pdb = contestconfig._makeparticipation(
                contestconfig._mytestuser.username, cdb,
                test_udb, test_gdb, None)
            for t in contestconfig.tasks.values():
                tdb = t._makedbobject(cdb, file_cacher)
                t._make_test_submissions(test_pdb, tdb, self.local_test)

        statements = list(contestconfig.tasks.values())[0]._statements
        if self.language == "ALL":
            return [os.path.abspath(s.file_)
                    for s in list(statements.values())]
        if self.language is not None:
            if self.language in statements:
                return os.path.abspath(statements[self.language].file_)
            else:
                return None
        primary_statements = [s for s in list(statements.values())
                              if s.primary]
        if len(primary_statements) == 0:
            return None
        elif len(primary_statements) == 1:
            return os.path.abspath(primary_statements[0].file_)
        else:
            raise Exception("More than one primary statement")
def __init__(self, path, prefix, update, no_statement, contest_id,
             loader_class):
    self.file_cacher = FileCacher()
    self.prefix = prefix
    self.update = update
    self.no_statement = no_statement
    self.contest_id = contest_id
    self.loader = loader_class(os.path.abspath(path), self.file_cacher)
def __init__(self, contest_id, spool_dir):
    self.contest_id = contest_id
    self.spool_dir = spool_dir
    self.upload_dir = os.path.join(self.spool_dir, "upload")
    self.contest = None
    self.submissions = None
    self.file_cacher = FileCacher()
def __init__(self, path, contest_id, force, loader_class, full):
    self.old_contest_id = contest_id
    self.force = force
    self.full = full
    self.file_cacher = FileCacher()
    self.loader = loader_class(os.path.realpath(path), self.file_cacher)
def __init__(self, listen_port, handlers, parameters, shard=0,
             listen_address=""):
    super(WebService, self).__init__(shard)

    static_files = parameters.pop('static_files', [])
    rpc_enabled = parameters.pop('rpc_enabled', False)
    rpc_auth = parameters.pop('rpc_auth', None)
    auth_middleware = parameters.pop('auth_middleware', None)
    is_proxy_used = parameters.pop('is_proxy_used', None)
    num_proxies_used = parameters.pop('num_proxies_used', None)

    self.wsgi_app = tornado.wsgi.WSGIApplication(handlers, **parameters)
    self.wsgi_app.service = self

    for entry in static_files:
        # TODO If we ever introduce a flag to trigger autoreload in
        # Jinja2 templates, use it to disable the cache arg here.
        self.wsgi_app = SharedDataMiddleware(
            self.wsgi_app, {"/static": entry},
            cache=True, cache_timeout=SECONDS_IN_A_YEAR,
            fallback_mimetype="application/octet-stream")

    self.file_cacher = FileCacher(self)
    self.wsgi_app = FileServerMiddleware(self.file_cacher, self.wsgi_app)

    if rpc_enabled:
        self.wsgi_app = DispatcherMiddleware(
            self.wsgi_app, {"/rpc": RPCMiddleware(self, rpc_auth)})

    # The authentication middleware needs to be applied before the
    # ProxyFix, as otherwise the remote address it gets is the one
    # of the proxy.
    if auth_middleware is not None:
        self.wsgi_app = auth_middleware(self.wsgi_app)
        self.auth_handler = self.wsgi_app

    # If we are behind one or more proxies, we'll use the content
    # of the X-Forwarded-For HTTP header (if provided) to determine
    # the client IP address, ignoring the one the request came from.
    # This makes it possible to use the IP lock behind a proxy.
    # Activate it only if all requests come from a trusted source
    # (if clients were allowed to directly communicate with the
    # server they could fake their IP and compromise the security
    # of the IP lock).
    if num_proxies_used is None:
        if is_proxy_used:
            num_proxies_used = 1
        else:
            num_proxies_used = 0

    if num_proxies_used > 0:
        self.wsgi_app = ProxyFix(self.wsgi_app, num_proxies_used)

    self.web_server = WSGIServer((listen_address, listen_port), self)
def setUp(self):
    self.file_cacher = FileCacher()
    # self.file_cacher = FileCacher(self, path="fs-storage")
    self.cache_base_path = self.file_cacher.file_dir
    self.cache_path = None
    self.content = None
    self.fake_content = None
    self.digest = None
    self.file_obj = None
def __init__(self, shard):
    Service.__init__(self, shard)
    self.file_cacher = FileCacher(self)

    self.work_lock = gevent.coros.RLock()
    self._last_end_time = None
    self._total_free_time = 0
    self._total_busy_time = 0
    self._number_execution = 0
def __init__(self, path, drop, test, zero_time, user_number,
             loader_class):
    self.drop = drop
    self.test = test
    self.zero_time = zero_time
    self.user_number = user_number
    self.file_cacher = FileCacher()
    self.loader = loader_class(os.path.realpath(path), self.file_cacher)
def __init__(self, path, zero_time, import_tasks, update_contest,
             update_tasks, no_statements, loader_class):
    self.zero_time = zero_time
    self.import_tasks = import_tasks
    self.update_contest = update_contest
    self.update_tasks = update_tasks
    self.no_statements = no_statements
    self.file_cacher = FileCacher()
    self.loader = loader_class(os.path.abspath(path), self.file_cacher)
def __init__(self, args):
    Service.__init__(self, shard=args.shard)
    self.address = config.get("core", "listen_address")
    self.port = int(config.get("core", "listen_port")) + args.shard
    self.file_cacher = FileCacher(self)
    self.evaluation_service = self.connect_to(
        ServiceCoord('EvaluationService', 0))
    self.wsgi_app = APIHandler(self)
def debugSubmission(submission_id, dataset_id, testcase_codename):
    config.keep_sandbox = True
    file_cacher = FileCacher()

    with SessionGen() as session:
        submission = session.query(Submission)\
            .filter(Submission.id == submission_id)\
            .first()

        if submission is None:
            logger.error("There's no submission with id %d" % submission_id)
            return False

        if dataset_id is None:
            dataset = submission.task.active_dataset
            dataset_id = submission.task.active_dataset_id
        else:
            dataset = session.query(Dataset)\
                .filter(Dataset.id == dataset_id)\
                .first()

        # Compilation
        operation = ESOperation(ESOperation.COMPILATION,
                                submission_id, dataset_id)
        comp_job = CompilationJob.from_submission(operation,
                                                  submission, dataset)

        task_type = get_task_type(comp_job.task_type,
                                  comp_job.task_type_parameters)
        task_type.execute_job(comp_job, file_cacher)

        for sandbox_path in comp_job.sandboxes:
            logger.info("Compilation sandbox created in %s" % sandbox_path)

        # Check if the compilation was successful.
        result = submission.get_result(dataset)
        if result is None or result.compilation_failed():
            logger.error("Compilation failed")
            return True

        # Evaluation
        operation = ESOperation(ESOperation.EVALUATION,
                                submission_id, dataset_id,
                                testcase_codename)
        eval_job = EvaluationJob.from_submission(operation,
                                                 submission, dataset)

        task_type = get_task_type(eval_job.task_type,
                                  eval_job.task_type_parameters)
        task_type.execute_job(eval_job, file_cacher)

        for sandbox_path in eval_job.sandboxes:
            logger.info("Evaluation sandbox created in %s" % sandbox_path)

    return True
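# A minimal sketch of driving debugSubmission() from the command line;
# the flag names below are illustrative, not an existing CLI.
if __name__ == "__main__":
    import argparse
    import sys

    parser = argparse.ArgumentParser(
        description="Re-run a submission locally, keeping its sandboxes.")
    parser.add_argument("submission_id", type=int)
    parser.add_argument("--dataset-id", type=int, default=None)
    parser.add_argument("--testcase", default=None,
                        help="codename of the testcase to evaluate")
    args = parser.parse_args()

    success = debugSubmission(args.submission_id, args.dataset_id,
                              args.testcase)
    sys.exit(0 if success else 1)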
def __init__(self, shard, fake_worker_time=None, listen_on_address=None):
    Service.__init__(self, shard, listen_on_address)
    self.file_cacher = FileCacher(self)

    self.work_lock = gevent.lock.RLock()
    self._last_end_time = None
    self._total_free_time = 0
    self._total_busy_time = 0
    self._number_execution = 0

    self._fake_worker_time = fake_worker_time
def __init__(self, drop, import_source, load_files, load_model,
             skip_generated, skip_submissions, skip_user_tests):
    self.drop = drop
    self.load_files = load_files
    self.load_model = load_model
    self.skip_generated = skip_generated
    self.skip_submissions = skip_submissions
    self.skip_user_tests = skip_user_tests

    self.import_source = import_source
    self.import_dir = import_source

    self.file_cacher = FileCacher()
def __init__(self, path, yes, zero_time, import_tasks, update_contest,
             update_tasks, no_statements, delete_stale_participations,
             loader_class):
    self.yes = yes
    self.zero_time = zero_time
    self.import_tasks = import_tasks
    self.update_contest = update_contest
    self.update_tasks = update_tasks
    self.no_statements = no_statements
    self.delete_stale_participations = delete_stale_participations
    self.file_cacher = FileCacher()
    self.loader = loader_class(os.path.abspath(path), self.file_cacher)
def __init__(self, contest_id, export_target, json):
    self.contest_id = contest_id
    self.export_target = export_target
    self.json = json

    # If target is not provided, we use the current date.
    if export_target == "":
        self.export_target = "users_c%d_%s.html" % \
            (self.contest_id, date.today().isoformat())
        logger.warning("export_target not given, using \"%s\"",
                       self.export_target)

    self.file_cacher = FileCacher()
def __init__(self, shard):
    logger.initialize(ServiceCoord("TestFileCacher", shard))
    TestService.__init__(self, shard, custom_logger=logger)

    # Assume we store the cache in "./cache/fs-cache-TestFileCacher-0/".
    self.cache_base_path = os.path.join(config.cache_dir,
                                        "fs-cache-TestFileCacher-0")
    self.cache_path = None
    self.content = None
    self.fake_content = None
    self.digest = None
    self.file_obj = None
    self.file_cacher = FileCacher(self)
def add_testcases(archive, input_template, output_template, task_name,
                  dataset_description=None, contest_name=None,
                  public=False, overwrite=False):
    with SessionGen() as session:
        task = session.query(Task)\
            .filter(Task.name == task_name).first()
        if not task:
            logger.error("No task called %s found." % task_name)
            return False
        dataset = task.active_dataset
        if dataset_description is not None:
            dataset = session.query(Dataset)\
                .filter(Dataset.task_id == task.id)\
                .filter(Dataset.description == dataset_description)\
                .first()
            if not dataset:
                logger.error("No dataset called %s found."
                             % dataset_description)
                return False
        if contest_name is not None:
            contest = session.query(Contest)\
                .filter(Contest.name == contest_name).first()
            if task.contest != contest:
                logger.error("%s is not in %s" % (task_name, contest_name))
                return False

        file_cacher = FileCacher()

        # Build regexes from the input/output file name templates.
        input_re = re.compile(
            re.escape(input_template).replace("\\*", "(.*)") + "$")
        output_re = re.compile(
            re.escape(output_template).replace("\\*", "(.*)") + "$")

        try:
            successful_subject, successful_message = \
                import_testcases_from_zipfile(
                    session, file_cacher, dataset,
                    archive, input_re, output_re, overwrite, public)
        except Exception as error:
            logger.error(str(error))
            return False

        logger.info(successful_subject)
        logger.info(successful_message)
        return True
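# A hedged usage sketch for add_testcases(): archive entries named like
# "input_01.txt" / "output_01.txt" are matched, with "*" capturing the
# testcase codename. Whether `archive` is a path or an open file object
# depends on import_testcases_from_zipfile(); an open binary file is
# assumed here.
with open("testcases.zip", "rb") as archive:
    ok = add_testcases(archive, "input_*.txt", "output_*.txt",
                       task_name="mytask", public=True, overwrite=False)
    print("import succeeded" if ok else "import failed")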
def __init__(self, shard):
    parameters = {
        "ui_modules": views,
        "template_path": pkg_resources.resource_filename(
            "cms.server.admin", "templates"),
        "static_files": [("cms.server", "static"),
                         ("cms.server.admin", "static")],
        "cookie_secret": base64.b64encode(config.secret_key),
        "debug": config.tornado_debug,
        "auth_middleware": AWSAuthMiddleware,
        "rpc_enabled": True,
        "rpc_auth": self.is_rpc_authorized,
        "xsrf_cookies": True,
    }
    super(AdminWebServer, self).__init__(
        config.admin_listen_port, HANDLERS, parameters,
        shard=shard, listen_address=config.admin_listen_address)

    # A list of pending notifications.
    self.notifications = []

    self.file_cacher = FileCacher(self)
    self.admin_web_server = self.connect_to(
        ServiceCoord("AdminWebServer", 0))
    self.queue_service = self.connect_to(
        ServiceCoord("QueueService", 0))
    # TODO: does it make sense to use a random one?
    self.evaluation_services = self.connect_to(
        ServiceCoord("EvaluationService", 0))
    self.scoring_service = self.connect_to(
        ServiceCoord("ScoringService", 0))

    ranking_enabled = len(config.rankings) > 0
    self.proxy_service = self.connect_to(
        ServiceCoord("ProxyService", 0),
        must_be_present=ranking_enabled)

    self.resource_services = []
    for i in range(get_service_shards("ResourceService")):
        self.resource_services.append(
            self.connect_to(ServiceCoord("ResourceService", i)))
    self.logservice = self.connect_to(ServiceCoord("LogService", 0))
def __init__(self, shard): """Initialize the PrintingService. """ super(PrintingService, self).__init__(shard) self.file_cacher = FileCacher(self) self.add_executor(PrintingExecutor(self.file_cacher)) if config.printer is None: logger.info("Printing is disabled, so the PrintingService is " "idle.") return
def extract_complexity(task_id, file_lengther=None):
    """Extract the complexity of all submissions of the task.

    The results are stored in a file task_<id>.info.

    task_id (int): the id of the task we are interested in.
    file_lengther (type): a file-like class that tells the dimension
        of the input (see the example above for how to write one).

    return (int): 0 if the operation was successful.

    """
    with SessionGen() as session:
        task = Task.get_from_id(task_id, session)
        if task is None:
            return -1

        # Extract the length of each testcase.
        file_cacher = FileCacher()
        testcases_lengths = [file_length(testcase.input,
                                         file_cacher, file_lengther)
                             for testcase in task.testcases]
        file_cacher.purge_cache()

        # Compute the complexity of the solutions.
        with io.open("task_%s.info" % task_id,
                     "wt", encoding="utf-8") as info:
            for submission in task.contest.get_submissions():
                if submission.task_id == task_id and \
                        submission.evaluated():
                    print(submission.participation.user.username)
                    result = extract_complexity_submission(
                        testcases_lengths, submission)
                    if result[1] is None:
                        continue
                    info.write("Submission: %s" % submission.id)
                    info.write(" - user: %15s" %
                               submission.participation.user.username)
                    info.write(" - task: %s" % task.name)
                    if result[0] is not None:
                        info.write(" - score: %6.2lf" % result[0])
                    info.write(" - complexity: %20s" %
                               complexity_to_string(result[1]))
                    if result[2] is not None:
                        info.write(" - confidence %5.1lf" % result[2])
                    info.write("\n")

    return 0
def add_statement(task_name, language_code, statement_type,
                  statement_file, overwrite):
    logger.info("Adding the statement (language: %s) of task %s "
                "to the database.", language_code, task_name)

    if statement_type is None:
        return False
    if not os.path.exists(statement_file):
        logger.error("Statement file (path: %s) does not exist.",
                     statement_file)
        return False

    with SessionGen() as session:
        task = session.query(Task)\
            .filter(Task.name == task_name).first()
        if not task:
            logger.error("No task named %s", task_name)
            return False
        try:
            file_cacher = FileCacher()
            digest = file_cacher.put_file_from_path(
                statement_file,
                "%s Statement (lang: %s) for task %s" %
                (statement_type.upper(), language_code, task_name))
        except Exception:
            logger.error("Task statement storage failed.", exc_info=True)
            # Without a digest there is nothing to attach to the task.
            return False
        arr = session.query(Statement)\
            .filter(Statement.language == language_code)\
            .filter(Statement.statement_type == statement_type)\
            .filter(Statement.task == task)\
            .all()
        if arr:  # Statement already exists
            if overwrite:
                logger.info("Overwriting already existing statement.")
                session.delete(arr[0])
                session.commit()
            else:
                logger.error("A statement of the given type and language "
                             "already exists. Not overwriting.")
                return False
        statement = Statement(language_code, statement_type, digest,
                              task=task)
        session.add(statement)
        session.commit()

    logger.info("Statement added.")
    return True
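# A minimal calling sketch for add_statement(); the file name and codes
# are illustrative only: attach an English PDF statement to task
# "mytask", replacing any existing statement of the same type and
# language.
ok = add_statement("mytask", "en", "pdf", "statement-en.pdf",
                   overwrite=True)
print("statement added" if ok else "statement not added")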
def __init__(self, shard):
    parameters = {
        "static_files": [("cms.server", "static"),
                         ("cms.server.admin", "static")],
        "cookie_secret": hex_to_bin(config.secret_key),
        "debug": config.tornado_debug,
        "auth_middleware": AWSAuthMiddleware,
        "rpc_enabled": True,
        "rpc_auth": self.is_rpc_authorized,
        "xsrf_cookies": True,
    }
    super(AdminWebServer, self).__init__(
        config.admin_listen_port, HANDLERS, parameters,
        shard=shard, listen_address=config.admin_listen_address)

    self.jinja2_environment = AWS_ENVIRONMENT

    # A list of pending notifications.
    self.notifications = []

    self.file_cacher = FileCacher(self)
    self.admin_web_server = self.connect_to(
        ServiceCoord("AdminWebServer", 0))
    self.evaluation_service = self.connect_to(
        ServiceCoord("EvaluationService", 0))
    self.scoring_service = self.connect_to(
        ServiceCoord("ScoringService", 0))

    ranking_enabled = len(config.rankings) > 0
    self.proxy_service = self.connect_to(
        ServiceCoord("ProxyService", 0),
        must_be_present=ranking_enabled)

    self.resource_services = []
    for i in range(get_service_shards("ResourceService")):
        self.resource_services.append(
            self.connect_to(ServiceCoord("ResourceService", i)))
    self.logservice = self.connect_to(ServiceCoord("LogService", 0))
def file_length(digest, file_cacher=None, file_lengther=None):
    """Compute the length of the file identified by digest.

    digest (string): the digest of the file.
    file_cacher (FileCacher): the cacher to use, or None.
    file_lengther (type): a file-like class that tells the dimension
        of the input (see the example above for how to write one).

    return (int): the length of the file.

    """
    if file_cacher is None:
        file_cacher = FileCacher()
    if file_lengther is None:
        file_lengther = FileLengther
    lengther = file_lengther()
    file_cacher.get_file_to_fobj(digest, lengther)
    return lengther.tell()
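# Both extract_complexity() and file_length() refer to an example
# "file lengther" that is not included in these snippets. A minimal
# sketch of one, assuming the only methods the cacher needs are
# write() and tell(): it discards the data and keeps a running byte
# count.
class FileLengther(object):
    """File-like object that only counts the bytes written to it."""

    def __init__(self):
        self.length = 0

    def write(self, data):
        # Do not store the data, just measure it.
        self.length += len(data)

    def tell(self):
        return self.length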
def __init__(self, path, prefix, override_name, update, no_statement,
             contest_id, loader_class):
    """Create the importer object for a task.

    path (string): the path to the file or directory to import.
    prefix (string): an optional prefix added to the task name.
    override_name (string): an optional new name for the task.
    update (bool): if the task already exists, try to update it.
    no_statement (bool): do not try to import the task statement.
    contest_id (int): if set, the new task will be tied to this contest.

    """
    self.file_cacher = FileCacher()
    self.prefix = prefix
    self.override_name = override_name
    self.update = update
    self.no_statement = no_statement
    self.contest_id = contest_id
    self.loader = loader_class(os.path.abspath(path), self.file_cacher)
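# A hedged construction sketch, assuming the constructor above belongs
# to a TaskImporter class with a do_import() entry point (as in
# cmsImportTask) and that an italy_yaml loader is available; the path
# and import below are assumptions.
from cmscontrib.loaders.italy_yaml import YamlLoader  # assumed import

importer = TaskImporter(path="path/to/task",
                        prefix=None,
                        override_name=None,
                        update=True,
                        no_statement=False,
                        contest_id=None,
                        loader_class=YamlLoader)
importer.do_import()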
def clean_files(session, dry_run):
    filecacher = FileCacher()
    files = set(file[0] for file in filecacher.list())
    logger.info("A total number of %d files are present in the file store",
                len(files))
    found_digests = enumerate_files(session)
    logger.info("Found %d digests while scanning", len(found_digests))
    files -= found_digests
    logger.info("%d digests are orphan.", len(files))
    total_size = 0
    for orphan in files:
        total_size += filecacher.get_size(orphan)
    logger.info("Orphan files take %s bytes of disk space",
                "{:,}".format(total_size))
    if not dry_run:
        for count, orphan in enumerate(files):
            filecacher.delete(orphan)
            if count % 100 == 0:
                logger.info("%d files deleted from the file store", count)
        logger.info("All orphan files have been deleted")
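# A minimal usage sketch for clean_files(), assuming SessionGen is
# importable from cms.db as in the other snippets: report the orphans
# first, then delete them for real.
from cms.db import SessionGen

with SessionGen() as session:
    clean_files(session, dry_run=True)   # only report orphan files
    clean_files(session, dry_run=False)  # actually delete them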
def __init__(self, contest_id, export_target,
             dump_files, dump_model, skip_generated,
             skip_submissions, skip_user_tests):
    self.contest_id = contest_id
    self.dump_files = dump_files
    self.dump_model = dump_model
    self.skip_generated = skip_generated
    self.skip_submissions = skip_submissions
    self.skip_user_tests = skip_user_tests

    # If target is not provided, we use the contest's name.
    if export_target == "":
        with SessionGen() as session:
            contest = Contest.get_from_id(self.contest_id, session)
            self.export_target = "dump_%s.tar.gz" % contest.name
            logger.warning("export_target not given, using \"%s\""
                           % self.export_target)
    else:
        self.export_target = export_target

    self.file_cacher = FileCacher()
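# A hedged usage sketch, assuming the constructor above belongs to a
# ContestExporter class with a do_export() entry point (as in
# cmsContestExporter); the argument values are illustrative only.
exporter = ContestExporter(contest_id=1,
                           export_target="",  # falls back to contest name
                           dump_files=True,
                           dump_model=True,
                           skip_generated=False,
                           skip_submissions=False,
                           skip_user_tests=False)
exporter.do_export()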