def __init__(self, shard):
    Service.__init__(self, shard)
    for service in config.async_config.core_services:
        self.connect_to(service)
    self.add_timeout(self.check, None, 90.0, immediately=True)
    self.waiting_for = {}
def __init__(self, shard, contest_id=None): """If contest_id is not None, we assume the user wants the autorestart feature. """ Service.__init__(self, shard) self.contest_id = contest_id # _local_store is a dictionary indexed by time in int(epoch) self._local_store = [] # Floating point epoch using for precise measurement of percents self._last_saved_time = time.time() # Starting point for cpu times self._prev_cpu_times = self._get_cpu_times() # Sorted list of ServiceCoord running in the same machine self._local_services = self._find_local_services() # Dict service with bool to mark if we will restart them. self._will_restart = dict((service, None if self.contest_id is None else True) for service in self._local_services) # Found process associate to the ServiceCoord. self._procs = dict((service, None) for service in self._local_services) # Previous cpu time for each service. self._services_prev_cpu_times = \ dict((service, (0.0, 0.0)) for service in self._local_services) # Start finding processes and their cputimes. self._store_resources(store=False) self.add_timeout(self._store_resources, None, 5.0) if self.contest_id is not None: self._launched_processes = set([]) self.add_timeout(self._restart_services, None, 5.0, immediately=True)
def __init__(self, shard):
    Service.__init__(self, shard)

    # Determine location of log file, and make directories.
    log_dir = os.path.join(config.log_dir, "cms")
    if not mkdir(config.log_dir) or \
            not mkdir(log_dir):
        logger.error("Cannot create necessary directories.")
        self.exit()
        return
    log_filename = "%d.log" % int(time.time())

    # Install a global file handler.
    self.file_handler = FileHandler(os.path.join(log_dir, log_filename),
                                    mode='w', encoding='utf-8')
    self.file_handler.setLevel(logging.DEBUG)
    self.file_handler.setFormatter(DetailedFormatter(False))
    root_logger.addHandler(self.file_handler)

    # Provide a symlink to the latest log file.
    try:
        os.remove(os.path.join(log_dir, "last.log"))
    except OSError:
        pass
    os.symlink(log_filename, os.path.join(log_dir, "last.log"))

    self._last_messages = deque(maxlen=self.LAST_MESSAGES_COUNT)
def __init__(self, odir, no_test, clean, force):
    self.odir = odir
    self.no_test = no_test
    self.clean = clean
    self.force = force

    Service.__init__(self)
def __init__(self, shard):
    Service.__init__(self, shard)
    self.file_cacher = FileCacher(self)

    self.work_lock = gevent.coros.RLock()
    self._last_end_time = None
    self._total_free_time = 0
    self._total_busy_time = 0
    self._number_execution = 0
def __init__(self, args):
    Service.__init__(self, shard=args.shard)
    self.address = config.get("core", "listen_address")
    self.port = int(config.get("core", "listen_port")) + args.shard
    self.file_cacher = FileCacher(self)
    self.evaluation_service = self.connect_to(
        ServiceCoord('EvaluationService', 0))
    self.wsgi_app = APIHandler(self)
def __init__(self, odir, no_test, safe_latex, clean,
             preserve_participations, force):
    self.odir = odir
    self.no_test = no_test
    self.safe_latex = safe_latex
    self.clean = clean
    self.preserve_participations = preserve_participations
    self.force = force

    Service.__init__(self)
def __init__(self, shard, fake_worker_time=None, listen_on_address=None):
    Service.__init__(self, shard, listen_on_address)
    self.file_cacher = FileCacher(self)

    self.work_lock = gevent.lock.RLock()
    self._last_end_time = None
    self._total_free_time = 0
    self._total_busy_time = 0
    self._number_execution = 0
    self._fake_worker_time = fake_worker_time
def __init__(self, shard):
    Service.__init__(self, shard)
    self.start = 0
    self.total_time = 0
    self.allright = 0
    self.current = -1
    self.ongoing = False
    self.failed = False
    self.retry = False
    self.add_timeout(self.test, None, 0.2, immediately=True)
    self.initialized = False
def __init__(self, shard): Service.__init__(self, shard=shard) self.address = config.contest_listen_address[shard] self.port = config.contest_listen_port[shard] self.file_cacher = FileCacher(self) self.evaluation_service = self.connect_to(ServiceCoord("EvaluationService", 0)) handler = APIHandler(self) self.wsgi_app = SharedDataMiddleware( handler, {"/": ("cms.web", "practice"), "/assets": ("cms.web", "assets"), "/resources": "/home/ioi/resources"}, )
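# SharedDataMiddleware above is assumed to be werkzeug's static-file
# middleware: it serves files for the listed URL prefixes and forwards
# every other request to the wrapped WSGI app; values may be filesystem
# paths or (package, resource_dir) tuples, as in the mapping above.
# A minimal standalone sketch (import path is for werkzeug >= 0.15;
# api_app and the paths are hypothetical):
from werkzeug.middleware.shared_data import SharedDataMiddleware


def api_app(environ, start_response):
    start_response("200 OK", [("Content-Type", "text/plain")])
    return [b"api\n"]


app = SharedDataMiddleware(api_app, {
    "/static": "/var/www/static",        # serve files from a directory
    "/assets": ("mypackage", "assets"),  # or from package data
})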
def __init__(self, shard): """Initialize the sweeper loop. shard (int): which service shard to run. """ Service.__init__(self, shard) self._executors = [] self._sweeper_start = None self._sweeper_event = Event() self._sweeper_started = False self._sweeper_timeout = None
def __init__(self, shard):
    Service.__init__(self, shard=shard)
    self.address = config.contest_listen_address[shard]
    self.port = config.contest_listen_port[shard]
    self.file_cacher = FileCacher(self)
    self.evaluation_service = self.connect_to(
        ServiceCoord('EvaluationService', 0))
    handler = APIHandler(self)
    self.wsgi_app = SharedDataMiddleware(handler, {
        '/': ('cms.web', 'practice'),
        '/assets': ('cms.web', 'assets')
    })
def __init__(self, shard): """Initialize the sweeper loop. shard (int): which service shard to run. """ Service.__init__(self, shard) self._executors = [] # Set up and spawn the sweeper. # # TODO: link to greenlet and react to its death. self._sweeper_start = None self._sweeper_event = Event() gevent.spawn(self._sweeper_loop)
def __init__(self, shard): """Initialize the ScoringService. """ Service.__init__(self, shard) # Set up communication with ProxyService. self.proxy_service = self.connect_to(ServiceCoord("ProxyService", 0)) # Set up and spawn the scorer. # TODO Link to greenlet: when it dies, log CRITICAL and exit. self._scorer_queue = JoinableQueue() gevent.spawn(self._scorer_loop) # Set up and spawn the sweeper. # TODO Link to greenlet: when it dies, log CRITICAL and exit. self._sweeper_start = None self._sweeper_event = Event() gevent.spawn(self._sweeper_loop)
def __init__(self, shard, contest_id): """Start the service with the given parameters. Create an instance of the ProxyService and make it listen on the address corresponding to the given shard. Tell it to manage data for the contest with the given ID. shard (int): the shard of the service, i.e. this instance corresponds to the shard-th entry in the list of addresses (hostname/port pairs) for this kind of service in the configuration file. contest_id (int): the ID of the contest to manage. """ Service.__init__(self, shard) self.contest_id = contest_id # Store what data we already sent to rankings. This is to aid # search_jobs_not_done determine what data we didn't send yet. self.scores_sent_to_rankings = set() self.tokens_sent_to_rankings = set() # Create and spawn threads to send data to rankings. self.rankings = list() for ranking in config.rankings: proxy = RankingProxy(ranking.encode('utf-8')) gevent.spawn(proxy.run) self.rankings.append(proxy) # Send some initial data to rankings. self.initialize() self.add_timeout(self.search_jobs_not_done, None, ProxyService.JOBS_NOT_DONE_CHECK_TIME, immediately=True)
def run(self):
    server = Server((self.address, self.port), self.wsgi_app)
    gevent.spawn(server.serve_forever)
    Service.run(self)
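# `Server` above is assumed to behave like gevent.pywsgi.WSGIServer, which
# takes a (host, port) pair and a WSGI app and exposes serve_forever().
# A minimal standalone sketch (hello_app and the address are hypothetical):
from gevent.pywsgi import WSGIServer


def hello_app(environ, start_response):
    start_response("200 OK", [("Content-Type", "text/plain")])
    return [b"ok\n"]


server = WSGIServer(("127.0.0.1", 8888), hello_app)
server.serve_forever()  # blocks; run() above spawns it in a greenlet instead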
def __init__(self, shard):
    Service.__init__(self, shard)
    for service in config.async.core_services:
        self.connect_to(service)
def __init__(self, shard):
    Service.__init__(self, shard)
    self.file_cacher = FileCacher(self)

    self.work_lock = gevent.coros.RLock()
    self._ignore_job = False
def __init__(self, shard):
    Service.__init__(self, shard)
    self.file_cacher = FileCacher(self)

    self.work_lock = gevent.coros.RLock()
def __init__(self, shard, contest_id=None):
    Service.__init__(self, shard)
    self.contest_id = contest_id