def __cleanup(self) -> None:
    """Tear down the media observer: detach handlers, stop the watcher
    thread, and shut the logging service down last."""
    log = MediaObserver.__LOG
    log.debug("Detaching event handlers")
    self.__event_handler.shutdown()
    log.debug("Shutting down observers")
    try:
        self.__observer.unschedule_all()
        self.__observer.stop()
        # block until the observer thread has fully terminated
        self.__observer.join()
        log.debug(
            f"Stop observing {self.__converter_config.media_in_folder}")
    except RuntimeError as e:
        # observer may already be stopped; log and continue shutdown
        log.debug(f"{e}", exc_info=True)
    log.debug("Shutting down logging service")
    LogManager.get_instance().shutdown()
def __init__(self, rules: List[Rule], max_threads: int = DEFAULT_MAX_THREADS):
    """Create a file event handler that routes events through a
    rule-based dispatcher, executing work on a bounded thread pool.

    :param rules: dispatch rules forwarded to the FileDispatcher
    :param max_threads: upper bound on worker threads
    """
    super().__init__()
    # class-level logger shared by all handler instances
    log_manager = LogManager.get_instance()
    FileEventHandler.__LOG = log_manager.get(LogManager.Logger.OBSERVER)
    self.__dispatcher = FileDispatcher(rules)
    self.__executor = ThreadPoolExecutor(max_workers=max_threads)
def __cleanup(self) -> None:
    """Detach event handlers, stop every directory observer, then shut
    down the logging service.

    Fix: the original wrapped the whole loop in a single try/except, so
    the first RuntimeError raised while stopping one observer aborted
    the loop and left every remaining observer thread running (leaked).
    The try/except now guards each directory individually so cleanup
    always proceeds to all observers.
    """
    FileObserver.__LOG.debug("Detaching event handlers")
    self.__event_handler.shutdown()
    FileObserver.__LOG.debug("Shutting down observers")
    for directory, observer in self.__dir_obs_dict.items():
        try:
            observer.unschedule_all()
            # stop observer if interrupted
            observer.stop()
            # Wait until the thread terminates before exit
            observer.join()
            FileObserver.__LOG.debug(f"Stop observing {directory}")
        except RuntimeError as e:
            # log and keep going: remaining observers must still be stopped
            FileObserver.__LOG.debug(f"{e}", exc_info=True)
    FileObserver.__LOG.debug("Shutting down logging service")
    LogManager.get_instance().shutdown()
def __init__(self, ffmpeg: str = FFMPEG_BIN, ffprobe: str = FFPROBE_BIN):
    """Validate that the ffmpeg/ffprobe binaries are installed and
    build the underlying Converter.

    :param ffmpeg: path or name of the ffmpeg executable
    :param ffprobe: path or name of the ffprobe executable
    """
    super().__init__()
    # fail fast if either binary is missing from the system
    for binary, label in ((ffmpeg, FFmpeg.FFMPEG_BIN),
                          (ffprobe, FFmpeg.FFPROBE_BIN)):
        Validation.is_installed(binary, f"Wrong path for {label}")
    FFmpeg.__LOG = LogManager.get_instance().get(
        LogManager.Logger.CONVERTER)
    self.__converter = Converter(ffmpeg, ffprobe)
def __init__(self, rules: List[Rule]):
    """Create a dispatcher for the given matching rules.

    :param rules: rules used to route files; when empty, a warning is
        logged because every file will be skipped.
    """
    super().__init__()
    FileDispatcher.__LOG = LogManager.get_instance().get(
        LogManager.Logger.DISPATCHER)
    self.__rules = rules
    # idiomatic emptiness test (PEP 8) instead of len(rules) == 0
    if not rules:
        FileDispatcher.__LOG.warning(
            "No rule specified. All files will be skipped.")
def __init__(self, conf: Conf):
    """Initialize the evaluator: validate the runtime environment,
    load the training/test datasets, and declare the classifier slots.

    :param conf: configuration object providing ``dataset_train`` and
        ``dataset_test`` file paths
    :raises: whatever ``Validation`` raises when the Python version is
        too old or a dataset file is missing/unreadable
    """
    super().__init__()

    # validate python version before touching anything else
    Validation.python_version(
        Evaluator.REQUIRED_PYTHON,
        f"Unsupported Python version.\n"
        f"Required Python {Evaluator.REQUIRED_PYTHON[0]}.{Evaluator.REQUIRED_PYTHON[1]} or higher."
    )
    # both dataset files must exist and be readable up front
    Validation.can_read(
        conf.dataset_train,
        f"Training set file *must* exists and be readable. "
        f"Current file: '{conf.dataset_train}'.\n"
        f"Training set path (fully qualified) can be specified in conf.ini file or using Conf object."
    )
    Validation.can_read(
        conf.dataset_test,
        f"Test set file *must* exists and be readable. "
        f"Current file: '{conf.dataset_test}'.\n"
        f"Test set path (fully qualified) can be specified in conf.ini file or using Conf object."
    )

    self.__LOG = LogManager.get_instance().logger(LogManager.Logger.EVAL)
    self.__conf = conf

    # using full dataset as training set
    self.__training = Set(pd.read_csv(self.conf.dataset_train))

    # load test set if it has same format as training_set.csv provided
    # as example file see ./res/dataset/test_set_no_index.csv
    self.__test = Set(pd.read_csv(self.conf.dataset_test))
    # Alternative loaders, kept for the other supported on-disk formats:
    # load test set if it has a header (F1-20 and CLASS row) AND an index,
    # i.e. a test set saved with pd.to_csv('/path', index=True)
    # as example file see ./res/dataset/test_set_index.csv
    # self.__test = Set(pd.read_csv(self.conf.dataset_test, index_col=0))
    # load test set if it has NO header row (no F1-20 and CLASS row) and
    # was not saved with pd.to_csv('/path', index=True), so it has no index
    # as example file see ./res/dataset/test_set_no_index_features.csv
    # self.__test = Set(pd.read_csv(self.conf.dataset_test, header=None,
    #                               names=[f"F{i}" for i in range(1, 21)] + ["CLASS"]))

    # current classifiers used; None values are placeholders — presumably
    # populated later by the training step (TODO confirm). Commented-out
    # entries are alternatives that can be re-enabled.
    self.__classifiers = {
        Evaluator._MULTILAYER_PERCEPTRON: None,
        Evaluator._SUPPORT_VECTOR_MACHINE: None,
        # Evaluator._DECISION_TREE: None,
        Evaluator._RANDOM_FOREST: None,
        # Evaluator._KNEAREST_NEIGHBORS: None,
        # Evaluator._STOCHASTIC_GRADIENT_DESCENT: None,
        Evaluator._ADA_BOOST: None,
        # Evaluator._NAIVE_BAYES: None,
        # Evaluator._KMEANS: None
    }
def __init__(self, max_processes: int = DEFAULT_MAX_PROCESSES, converter: ConverterFactory.Converters = DEFAULT_CONVERTER):
    """Create a media event handler backed by a process pool.

    :param max_processes: upper bound on worker processes
    :param converter: converter type resolved via the ConverterFactory
    """
    super().__init__()
    log_manager = LogManager.get_instance()
    MediaEventHandler.__LOG = log_manager.get(LogManager.Logger.OBSERVER)
    self.__converter = ConverterFactory.get_type(converter)
    # each worker process runs __init_worker once at startup
    self.__executor = ProcessPoolExecutor(
        initializer=MediaEventHandler.__init_worker,
        max_workers=max_processes)
def __init_loggers(cls, log_filename: str) -> None:
    """Load the logging configuration from *log_filename* and bind the
    class-level observer logger."""
    manager = LogManager.get_instance()
    manager.load(log_filename)
    MediaObserver.__LOG = manager.get(LogManager.Logger.OBSERVER)
def __init_logging(self):
    """Cache the log manager singleton and obtain the root logger."""
    manager = LogManager.get_instance()
    self.__LOG_MANAGER = manager
    # root logger for this component
    self.__LOG = manager.logger(LogManager.Logger.ROOT)