Example #1
    def __init__(self, shard):
        logger.initialize(ServiceCoord("Worker", shard))
        Service.__init__(self, shard, custom_logger=logger)
        self.file_cacher = FileCacher(self)

        self.work_lock = gevent.coros.RLock()
        self._ignore_job = False
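
A hedged usage sketch (not part of the original snippet): a Service subclass such as this Worker is built with a shard number and then enters its blocking run() loop. The Worker name and the command-line handling below are illustrative assumptions.

    # Hypothetical launcher for a Service subclass such as the Worker above;
    # the shard selects which configured address this instance binds to.
    import sys

    if __name__ == "__main__":
        shard = int(sys.argv[1]) if len(sys.argv) > 1 else 0
        Worker(shard).run()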
Example #2
    def __init__(self, shard, contest_id=None):
        """If contest_id is not None, we assume the user wants the
        autorestart feature.

        """
        Service.__init__(self, shard)

        self.contest_id = contest_id

        # _local_store is a dictionary indexed by time in int(epoch)
        self._local_store = []
        # Floating-point epoch used for precise measurement of percentages
        self._last_saved_time = time.time()
        # Starting point for cpu times
        self._prev_cpu_times = self._get_cpu_times()
        # Sorted list of ServiceCoord running in the same machine
        self._local_services = self._find_local_services()
        # Dict mapping each service to a bool marking whether we will restart it.
        self._will_restart = dict(
            (service, None if self.contest_id is None else True) for service in self._local_services
        )
        # Process found to be associated with each ServiceCoord.
        self._procs = dict((service, None) for service in self._local_services)
        # Previous cpu time for each service.
        self._services_prev_cpu_times = dict((service, (0.0, 0.0)) for service in self._local_services)
        # Start finding processes and their cputimes.
        self._store_resources(store=False)

        self.add_timeout(self._store_resources, None, 5.0)
        if self.contest_id is not None:
            self._launched_processes = set([])
            self.add_timeout(self._restart_services, None, 5.0, immediately=True)
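
The two add_timeout calls above register periodic callbacks (sample resources every 5 seconds and, when a contest_id is given, also try to restart services). Below is a minimal standalone sketch of that kind of periodic scheduling with gevent; it is an illustration, not the actual Service.add_timeout implementation, and the assumption that plus is an optional extra argument forwarded to the callback is mine.

    # Sketch: call `callback` every `seconds` seconds in a gevent greenlet,
    # optionally firing once immediately, as the calls above suggest.
    import gevent

    def periodic(callback, plus, seconds, immediately=False):
        def _call():
            if plus is None:
                callback()
            else:
                callback(plus)

        def _loop():
            if immediately:
                _call()
            while True:
                gevent.sleep(seconds)
                _call()

        return gevent.spawn(_loop)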
Example #3
    def __init__(self, listen_port, handlers, parameters, shard=0,
                 listen_address=""):
        Service.__init__(self, shard)

        self.__responses = {}
        # TODO: why are the following two lines needed?
        self._RPCRequestHandler__responses = self.__responses
        self._RPCAnswerHandler__responses = self.__responses
        handlers += [(r"/rpc_request/([a-zA-Z0-9_-]+)/"
                      "([0-9]+)/([a-zA-Z0-9_-]+)",
                      RPCRequestHandler),
                     (r"/rpc_answer", RPCAnswerHandler),
                     (r"/sync_rpc_request/([a-zA-Z0-9_-]+)/"
                      "([0-9]+)/([a-zA-Z0-9_-]+)",
                      SyncRPCRequestHandler)]
        self.application = tornado.wsgi.WSGIApplication(handlers, **parameters)
        self.application.service = self

        # is_proxy_used=True means the content of the header X-Real-IP
        # is interpreted as the request IP. This means that if we're
        # behind a proxy, it can see the real IP the request is coming
        # from. But, to use it, we need to be sure we can trust it
        # (i.e., if we are not behind a proxy that sets that header,
        # we must not use it).
        real_application = self.application
        if parameters.get('is_proxy_used', False):
            real_application = WSGIXheadersMiddleware(real_application)

        self.web_server = WSGIServer((listen_address, listen_port),
                                     real_application)
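
The comment about is_proxy_used explains why the X-Real-IP header is trusted only when a known proxy sets it. A minimal WSGI middleware in that spirit could look like the sketch below; this illustrates the idea and is not the actual WSGIXheadersMiddleware implementation.

    # Illustrative middleware: rewrite REMOTE_ADDR from X-Real-IP. Wrap it around
    # the application only when a trusted proxy is known to set that header.
    class XRealIPMiddlewareSketch(object):
        def __init__(self, app):
            self.app = app

        def __call__(self, environ, start_response):
            real_ip = environ.get("HTTP_X_REAL_IP")
            if real_ip:
                environ["REMOTE_ADDR"] = real_ip
            return self.app(environ, start_response)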
Example #4
    def __init__(self, shard):
        Service.__init__(self, shard)

        # Determine location of log file, and make directories.
        log_dir = os.path.join(config.log_dir, "cms")
        if not mkdir(config.log_dir) or not mkdir(log_dir):
            logger.error("Cannot create necessary directories.")
            self.exit()
            return
        log_filename = "%d.log" % int(time.time())

        # Install a global file handler.
        self.file_handler = FileHandler(os.path.join(log_dir, log_filename), mode="w", encoding="utf-8")
        self.file_handler.setLevel(logging.DEBUG)
        self.file_handler.setFormatter(CustomFormatter(False))
        root_logger.addHandler(self.file_handler)

        # Provide a symlink to the latest log file.
        try:
            os.remove(os.path.join(log_dir, "last.log"))
        except OSError:
            pass
        os.symlink(log_filename, os.path.join(log_dir, "last.log"))

        self._last_messages = deque(maxlen=self.LAST_MESSAGES_COUNT)
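
Once the file handler is attached to root_logger as above, records emitted through any module-level logger propagate to it. A self-contained sketch using only the standard library (CustomFormatter and the project's mkdir helper are replaced by stdlib equivalents, and the file path is illustrative):

    # Standard-library-only illustration of the handler setup shown above.
    import logging

    root_logger = logging.getLogger()
    root_logger.setLevel(logging.DEBUG)
    handler = logging.FileHandler("/tmp/example.log", mode="w", encoding="utf-8")
    handler.setLevel(logging.DEBUG)
    handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(message)s"))
    root_logger.addHandler(handler)

    logging.getLogger("cms.example").info("this record ends up in example.log")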
Example #5
    def __init__(self, shard):
        Service.__init__(self, shard)

        # Determine location of log file, and make directories.
        log_dir = os.path.join(config.log_dir, "cms")
        if not mkdir(config.log_dir) or \
                not mkdir(log_dir):
            logger.error("Cannot create necessary directories.")
            self.exit()
            return
        log_filename = "%d.log" % int(time.time())

        # Install a global file handler.
        self.file_handler = FileHandler(os.path.join(log_dir, log_filename),
                                        mode='w', encoding='utf-8')
        self.file_handler.setLevel(logging.DEBUG)
        self.file_handler.setFormatter(CustomFormatter(False))
        root_logger.addHandler(self.file_handler)

        # Provide a symlink to the latest log file.
        try:
            os.remove(os.path.join(log_dir, "last.log"))
        except OSError:
            pass
        os.symlink(log_filename,
                   os.path.join(log_dir, "last.log"))

        self._last_messages = deque(maxlen=self.LAST_MESSAGES_COUNT)
Example #6
    def run(self):
        """Start the WebService.

        Both the WSGI server and the RPC server are started.

        """
        self.web_server.start()
        Service.run(self)
        self.web_server.stop()
Example #7
    def __init__(self, shard):
        Service.__init__(self, shard)

        self.start = 0
        self.total_time = 0
        self.allright = 0
        self.current = -1
        self.ongoing = False
        self.failed = False
        self.retry = False
        self.add_timeout(self.test, None, 0.2, immediately=True)
        self.initialized = False
Example #8
    def __init__(self, shard, custom_logger=None):
        Service.__init__(self, shard, custom_logger)

        global logger
        from cms.io.GeventLibrary import logger as _logger
        logger = _logger

        self.start = 0
        self.total_time = 0
        self.allright = 0
        self.current = -1
        self.ongoing = False
        self.failed = False
        self.retry = False
        self.add_timeout(self.test, None, 0.2, immediately=True)
        self.initialized = False
Example #9
    def __init__(self, shard):
        """Initialize the ScoringService.

        """
        Service.__init__(self, shard)

        # Set up communication with ProxyService.
        self.proxy_service = self.connect_to(ServiceCoord("ProxyService", 0))

        # Set up and spawn the scorer.
        # TODO Link to greenlet: when it dies, log CRITICAL and exit.
        self._scorer_queue = JoinableQueue()
        gevent.spawn(self._scorer_loop)

        # Set up and spawn the sweeper.
        # TODO Link to greenlet: when it dies, log CRITICAL and exit.
        self._sweeper_start = None
        self._sweeper_event = Event()
        gevent.spawn(self._sweeper_loop)
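
The two TODOs above ask for the greenlets to be linked so that a crash is noticed. One way to do that, sketched under the assumption that gevent's Greenlet.link_exception is available and that exit() behaves as in the other examples, is a small helper like this:

    # Hypothetical helper: spawn a greenlet and make its death loud and fatal.
    def _spawn_linked(self, target):
        greenlet = gevent.spawn(target)

        def _on_death(dead):
            logger.critical("Greenlet %r died: %s", dead, dead.exception)
            self.exit()

        greenlet.link_exception(_on_death)
        return greenlet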
Example #10
    def __init__(self, shard, contest_id):
        logger.initialize(ServiceCoord("ScoringService", shard))
        Service.__init__(self, shard, custom_logger=logger)

        self.contest_id = contest_id

        # If for some reason (SS switched off for a while, or broken
        # connection with ES), submissions have been left without a
        # score, this is the set where you want to put their ids. Note
        # that the set is non-empty if and only if there is a live
        # timeout for the method "score_old_submission".
        #
        # submission_results_to_score and submission_results_scored
        # contain pairs of (submission_id, dataset_id).
        #
        # submissions_to_token and submission_tokened contain scalar
        # values of submission_id.
        self.submission_results_to_score = set()
        self.submissions_to_token = set()
        self.scoring_old_submission = False

        # We need to load every submission at start, but we don't want
        # to invalidate every score so that we can simply load the
        # score-less submissions. So we keep a set of submissions that
        # we analyzed (for scoring and for tokens).
        self.submission_results_scored = set()
        self.submissions_tokened = set()

        # Create and spawn threads to send data to rankings.
        self.rankings = list()
        for ranking in config.rankings:
            proxy = RankingProxy(ranking)
            gevent.spawn(proxy.run)
            self.rankings.append(proxy)

        self.rankings_initialize()

        self.add_timeout(self.search_jobs_not_done, None,
                         ScoringService.JOBS_NOT_DONE_CHECK_TIME,
                         immediately=True)
Example #11
    def __init__(self, shard):
        logger.initialize(ServiceCoord("LogService", shard))
        Service.__init__(self, shard, custom_logger=logger)

        log_dir = os.path.join(config.log_dir, "cms")
        if not mkdir(config.log_dir) or \
               not mkdir(log_dir):
            logger.error("Cannot create necessary directories.")
            self.exit()
            return

        log_filename = "%d.log" % int(time.time())
        self._log_file = codecs.open(os.path.join(log_dir, log_filename),
                                     "w", "utf-8")
        try:
            os.remove(os.path.join(log_dir, "last.log"))
        except OSError:
            pass
        os.symlink(log_filename,
                   os.path.join(log_dir, "last.log"))

        self._last_messages = []
Example #12
    def __init__(self, shard, contest_id=None):
        """If contest_id is not None, we assume the user wants the
        autorestart feature.

        """
        Service.__init__(self, shard)

        self.contest_id = contest_id

        # _local_store is a dictionary indexed by time in int(epoch)
        self._local_store = []
        # Floating-point epoch used for precise measurement of percentages
        self._last_saved_time = time.time()
        # Starting point for cpu times
        self._prev_cpu_times = self._get_cpu_times()
        # Sorted list of ServiceCoord running in the same machine
        self._local_services = self._find_local_services()
        # Dict mapping each service to a bool marking whether we will restart it.
        self._will_restart = dict(
            (service, None if self.contest_id is None else True)
            for service in self._local_services)
        # Process found to be associated with each ServiceCoord.
        self._procs = dict((service, None) for service in self._local_services)
        # Previous cpu time for each service.
        self._services_prev_cpu_times = \
            dict((service, (0.0, 0.0)) for service in self._local_services)
        # Start finding processes and their cputimes.
        self._store_resources(store=False)

        self.add_timeout(self._store_resources, None, 5.0)
        if self.contest_id is not None:
            self._launched_processes = set([])
            self.add_timeout(self._restart_services,
                             None,
                             5.0,
                             immediately=True)
Example #13
    def __init__(self, shard, contest_id):
        """Start the service with the given parameters.

        Create an instance of the ProxyService and make it listen on
        the address corresponding to the given shard. Tell it to
        manage data for the contest with the given ID.

        shard (int): the shard of the service, i.e. this instance
            corresponds to the shard-th entry in the list of addresses
            (hostname/port pairs) for this kind of service in the
            configuration file.
        contest_id (int): the ID of the contest to manage.

        """
        Service.__init__(self, shard)

        self.contest_id = contest_id

        # Store what data we have already sent to rankings. This helps
        # search_jobs_not_done determine what data we haven't sent yet.
        self.scores_sent_to_rankings = set()
        self.tokens_sent_to_rankings = set()

        # Create and spawn threads to send data to rankings.
        self.rankings = list()
        for ranking in config.rankings:
            proxy = RankingProxy(ranking.encode('utf-8'))
            gevent.spawn(proxy.run)
            self.rankings.append(proxy)

        # Send some initial data to rankings.
        self.initialize()

        self.add_timeout(self.search_jobs_not_done, None,
                         ProxyService.JOBS_NOT_DONE_CHECK_TIME,
                         immediately=True)
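
Each RankingProxy runs in its own greenlet so that a slow or unreachable ranking does not block updates to the others. The class below is a hypothetical sketch of that consumer pattern; the real RankingProxy and its queue interface are not shown in this example.

    # Sketch: one queue-draining greenlet per ranking endpoint.
    from gevent.queue import Queue

    class RankingProxySketch(object):
        def __init__(self, address):
            self.address = address
            self.data_queue = Queue()

        def run(self):
            while True:
                operation = self.data_queue.get()  # blocks until data arrives
                # ... send operation to self.address over HTTP ...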
Example #14
    def __init__(self, shard):
        Service.__init__(self, shard)
        for service in config.async.core_services:
            self.connect_to(service)
Example #15
    def __init__(self, shard):
        Service.__init__(self, shard)
        self.file_cacher = FileCacher(self)

        self.work_lock = gevent.coros.RLock()
        self._ignore_job = False
Example #16
    def __init__(self, shard):
        Service.__init__(self, shard)
        for service in config.async.core_services:
            self.connect_to(service)
Example #17
    def __init__(self, shard):
        logger.initialize(ServiceCoord("Checker", shard))
        Service.__init__(self, shard, custom_logger=logger)
        for service in config.async.core_services:
            self.connect_to(service)
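
Pulling the recurring structure of these examples together: every service initializes the Service base class with its shard, optionally connects to other services or registers periodic callbacks, and then blocks in run(). A minimal hypothetical skeleton, with names that are illustrative rather than taken from any single example above:

    # Hypothetical minimal service following the pattern shown in these examples.
    class ExampleService(Service):
        def __init__(self, shard):
            Service.__init__(self, shard)
            self.add_timeout(self._tick, None, 5.0, immediately=True)

        def _tick(self):
            logger.info("ExampleService is still alive")

    if __name__ == "__main__":
        ExampleService(shard=0).run()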