def setup(self, log_queue, console_level: str = 'info', file_level: str = 'debug',
          log_dir: str = None, file_size: int = 10485760):
    self.log_queue = log_queue
    self.rollover_info = {}
    # init root logger
    logger = logging.getLogger()
    logger.handlers = []
    # console handler
    if console_level:
        ch = logging.StreamHandler()
        ch.setLevel(console_level.upper())
        ch.addFilter(FilterMeta())
        ch.addFilter(FilterMetaDevel())
        if console_level == 'debug':
            ch.addFilter(FilterMetaDebug())
        ch.addFilter(FilterMultiline())
        ch.setFormatter(self.formatter)
        logger.addHandler(ch)
    # rotating file handler
    if file_level and log_dir:
        fh = handlers.RotatingFileHandler(
            os.path.join(log_dir, f"{self.name}.log"), 'a', file_size, 10)
        fh.setLevel(file_level.upper())
        fh.addFilter(FilterMeta())
        fh.addFilter(FilterMetaDevel())
        if file_level == 'debug':
            fh.addFilter(FilterMetaDebug())
        fh.addFilter(FilterMultiline())
        fh.setFormatter(self.formatter)
        logger.addHandler(fh)
    logger.setLevel(level=logging.DEBUG)
    process.worker_start(self)
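# Hedged usage sketch for setup() above (illustration only): `LogWorker` and
# the queue handoff are assumptions, not code from this excerpt.  Shown here:
# the caller creates the queue, then setup() attaches a console handler and a
# size-rotating file handler (10 backups, ~10 MiB each) to the root logger.
#
# import multiprocessing, tempfile
#
# log_queue = multiprocessing.Queue()
# worker = LogWorker('printer')              # hypothetical owner of setup()
# worker.setup(log_queue,
#              console_level='info',         # INFO and above to the console
#              file_level='debug',           # DEBUG and above to printer.log
#              log_dir=tempfile.gettempdir())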
def __setstate__(self, state):
    "Restore the unpicklable members after unpickling."
    # TODO
    self.__dict__.update(state)
    self.monotonic = chelper.get_ffi()[1].get_monotonic
    self._timers = []
    self._pipe, subpipe = multiprocessing.Pipe()
    process.worker_start(self, (subpipe, _NEVER))
    self._async = concurrent.futures.ProcessPoolExecutor(
        max_workers=multiprocessing.cpu_count())
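# Hedged note on __setstate__ above: it runs when the object is rebuilt from a
# pickle (for example when it crosses a fork/spawn boundary), and it recreates
# the members that cannot be pickled: the ffi monotonic clock, the timer list,
# the pipe pair and the process pool.  Illustration only; the matching
# __getstate__ is not shown in this excerpt.
#
# import pickle
# clone = pickle.loads(pickle.dumps(obj))   # loads() invokes __setstate__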
def __init__(self, name, starttime=_NEVER, verbose=False):
    process.Base.__init__(self, name)
    tree.Part.__init__(self, name)
    # scheduled timers (note: copied on fork/spawn, local copies have their own callbacks)
    self._timers = []
    # start scheduler
    process.worker_start(self, (self._subpipe, starttime, verbose))
    # async workers (note: they do not run in the timers' context)
    self._async = concurrent.futures.ProcessPoolExecutor(
        max_workers=max(1, multiprocessing.cpu_count() - 3))  # keep at least one worker
    if verbose:
        timer = self.timer(None, self.monotonic(), self._print_overhead)
        self.timer_add(timer)
    self.ready = True
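# Hedged construction sketch (illustration only): `Scheduler` is a stand-in
# name for the class whose __init__ appears above; _NEVER and the worker
# plumbing come from the surrounding module.
#
# sched = Scheduler('scheduler', verbose=True)   # verbose=True also registers
#                                                # the overhead-printing timer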
def __init__(self, name):
    super().__init__(name)
    # start
    process.worker_start(self, (self._subpipe,))
    # list of opened file descriptors
    self.fd = []