def start(self) -> None:
    """Load the dispatcher configuration and start observing files.

    The configuration file path is taken from the first command-line
    argument when present, otherwise the bundled default is used.
    Blocks until the underlying observer stops.
    """
    # Construct configuration file path (CLI argument wins over default).
    config_file = (
        sys.argv[1]
        if len(sys.argv) > 1
        else f"{Common.get_proj_root_path()}/res/conf/conf.ini"
    )

    try:
        Validation.is_file_readable(
            config_file,
            f"Error on '{config_file}': configuration file *must* exists and be readable"
        )
        dispatcher_config = DispatcherConfig.get_instance()
        dispatcher_config.load_from(config_file)

        self.__file_observer = FileObserver(dispatcher_config)
        # Make sure observers are shut down even on interpreter exit.
        atexit.register(self.stop)

        # Blocking call
        self.__file_observer.start()
    except SyntaxError:
        print(
            f"Syntax error in configuration file.\n"
            f"Make sure to follow JSON format while defining formats, sources, destinations and rules."
        )
    except Exception as e:
        # Top-level boundary: report and return instead of crashing.
        print(e)
def __init__(self, dispatcher_config: DispatcherConfig):
    """
    Initialize the file observer from an already-loaded configuration.

    :param dispatcher_config: configuration holding formats, sources,
        destinations, rules and general settings
    :raise: validation errors raised by __validate_rules and
        __check_permissions (see those methods)
    """
    super().__init__()

    print(f"version: {__version__.__version__}")

    # Validate python version
    Validation.python_version(FileObserver.PY_VERSION_MIN,
                              f"Python version required >= {FileObserver.PY_VERSION_MIN}")

    self.__dispatcher_config = dispatcher_config

    # Load loggers
    self.__init_loggers(dispatcher_config.log_filename)

    # Validate rules
    self.__validate_rules()
    # Check permissions
    self.__check_permissions()

    # Presumably maps each source directory to an observer — TODO confirm
    # against __fill_dict.
    self.__dir_obs_dict = self.__fill_dict(dispatcher_config.dispatcher_sources)
    # Single event handler shared by all observers; rules are built once
    # up-front from the configuration sections.
    self.__event_handler = FileEventHandler(
        RuleBuilder.build_list(
            dispatcher_config.dispatcher_rules,
            dispatcher_config.dispatcher_formats,
            dispatcher_config.dispatcher_sources,
            dispatcher_config.dispatcher_destinations
        ),
        dispatcher_config.general_threads
    )
def get(cls, logger: Logger) -> logging.Logger:
    """Return the cached logging.Logger for the given Logger enum member.

    :raise NotImplementedError: when the member has no associated logger
    """
    Validation.not_none(logger)

    # Dispatch table instead of an if/elif chain.
    known_loggers = {
        cls.Logger.ROOT: cls._LOGGER_ROOT,
        cls.Logger.OBSERVER: cls._LOGGER_OBSERVER,
        cls.Logger.CONVERTER: cls._LOGGER_CONVERTER,
    }
    if logger not in known_loggers:
        raise NotImplementedError
    return known_loggers[logger]
def logger(cls, logger: Logger) -> logging.Logger:
    """Return the cached logging.Logger for the given Logger enum member.

    :raise NotImplementedError: when the member has no associated logger
    """
    Validation.not_none(logger)

    # Lookup table in place of the if/elif ladder.
    known_loggers = {
        cls.Logger.ROOT: cls.__LOGGER_ROOT,
        cls.Logger.MCC: cls.__LOGGER_MCC,
        cls.Logger.EVAL: cls.__LOGGER_EVAL,
    }
    if logger not in known_loggers:
        raise NotImplementedError
    return known_loggers[logger]
def load_from(self, config_file: str) -> None:
    """
    Read *config_file* and refresh the in-memory configuration.

    :param config_file: path to the configuration file
    :raise: SyntaxError if there is a syntax error in configuration file
    """
    readable_error = f"File '{config_file}' *must* exists and be readable"
    Validation.is_file_readable(config_file, readable_error)

    # Parse and publish under the same lock so concurrent readers never
    # observe a half-updated configuration.
    with self.__LOCK:
        self.__config_parser.read(config_file)
        self.__upload_config()
def load_from(cls, log_config_file: str) -> None:
    """Apply a fileConfig-style logging configuration and refresh the cached loggers."""
    from util import Validation

    readable_error = f"File '{log_config_file}' *must* exists and be readable"
    Validation.is_file_readable(log_config_file, readable_error)

    with cls.__LOCK:
        logging.config.fileConfig(fname=log_config_file)

        # Re-fetch the named loggers: fileConfig replaced the instances.
        cls._LOGGER_ROOT = logging.getLogger(LogManager.Logger.ROOT.value)
        cls._LOGGER_OBSERVER = logging.getLogger(LogManager.Logger.OBSERVER.value)
        cls._LOGGER_CONVERTER = logging.getLogger(LogManager.Logger.CONVERTER.value)
def __init_conf(self):
    """Locate, validate and load the client configuration file.

    The first command-line argument, when present, overrides the default
    bundled configuration path.  Enables debug logging when requested by
    the configuration.
    """
    # construct configuration file path
    conf_path = (
        sys.argv[1]
        if len(sys.argv) > 1
        else f"{Path(Common.get_root_path(), Client._DEFAULT_CONF_PATH)}"
    )
    Validation.is_file_readable(
        conf_path,
        f"Error on '{conf_path}': configuration file *must* exists and be readable"
    )

    self.__conf = Conf.get_instance()
    self.conf.load_from(conf_path)

    if self.conf.debug:
        self.__LOG_MANAGER.enable_debug_level()
def load_from(self, config_file: str) -> None:
    """
    Parse *config_file* and populate every known configuration entry,
    using the per-key default when the file omits a value.

    :param config_file: path to the configuration file
    :raise: SyntaxError if there is a syntax error in configuration file
    """
    Validation.is_file_readable(
        config_file,
        f"File '{config_file}' *must* exists and be readable")

    self.__config_parser.read(config_file)

    # One row per configuration entry: (setter, section, key, default).
    entries = (
        # section GENERAL
        (self.__put_str, Conf.S_GENERAL, Conf.K_TMP, Conf.V_DEFAULT_TMP),
        (self.__put_bool, Conf.S_GENERAL, Conf.K_DEBUG, Conf.V_DEFAULT_DEBUG),
        # section TRAINING
        (self.__put_str, Conf.S_TRAINING, Conf.K_DATASET_TRAIN, Conf.V_DEFAULT_DATASET_TRAIN),
        (self.__put_float, Conf.S_TRAINING, Conf.K_DATASET_TEST_RATIO, Conf.V_DEFAULT_DATASET_TEST_RATIO),
        (self.__put_int, Conf.S_TRAINING, Conf.K_RNG_SEED, Conf.V_DEFAULT_RNG_SEED),
        (self.__put_bool, Conf.S_TRAINING, Conf.K_CHARTS_COMPUTE, Conf.V_DEFAULT_CHARTS_COMPUTE),
        (self.__put_bool, Conf.S_TRAINING, Conf.K_CHARTS_SAVE, Conf.V_DEFAULT_CHARTS_SAVE),
        (self.__put_int, Conf.S_TRAINING, Conf.K_JOBS, Conf.V_DEFAULT_JOBS),
        (self.__put_bool, Conf.S_TRAINING, Conf.K_CLASSIFIER_DUMP, Conf.V_DEFAULT_CLASSIFIER_DUMP),
        # section MOBD
        (self.__put_tuple, Conf.S_MOBD, Conf.K_BENCHMARK_BEST_FOUND, Conf.V_DEFAULT_BENCHMARK_BEST_FOUND),
        (self.__put_tuple, Conf.S_MOBD, Conf.K_BENCHMARK_THRESHOLD, Conf.V_DEFAULT_BENCHMARK_THRESHOLD),
        (self.__put_str, Conf.S_MOBD, Conf.K_DATASET_TEST, Conf.V_DEFAULT_DATASET_TEST),
    )
    # Same call order and arguments as the original explicit sequence.
    for put, section, key, default in entries:
        put(key, section, key, default)
def load(cls, log_filename: str = None) -> None:
    """(Re)build formatters, handlers and loggers programmatically.

    :param log_filename: optional log file path; when given, its parent
        directory must be writable and a file handler is added
    """
    if log_filename is not None:
        from pathlib import Path
        from util import Validation

        log_dir = Path(log_filename).parent
        Validation.is_dir_writeable(
            log_dir,
            f"Directory '{log_dir}' must exists and be writable")

    with cls.__LOCK:
        # formatters
        cls._FORMATTER = LogManager.__configure_formatter()

        # handlers: console always, file only when a filename was given
        cls._HANDLER_CONSOLE = LogManager.__configure_handler_console()
        if log_filename is not None:
            LogManager._LOG_FILENAME = log_filename
            cls._HANDLER_FILE = LogManager.__configure_handler_file(log_filename)

        # loggers
        cls._LOGGER_ROOT = LogManager.__configure_logger_root()
        cls._LOGGER_OBSERVER = LogManager.__configure_logger_observer()
        cls._LOGGER_CONVERTER = LogManager.__configure_logger_dispatcher()
def match_rule(self, rule: Rule) -> bool:
    """Return True when this file's extension matches one of *rule*'s
    formats and the file sits in the root of one of *rule*'s sources.
    """
    # Case-insensitive extension check against the rule's formats.
    extension = self.__ext.lower()
    if not any(extension == fmt.lower() for fmt in rule.formats):
        return False

    # The creation/modification which triggered the event handler must
    # belong to at least one source of the rule.
    # Note: this works only if the file is created in the root directory
    # of the source (a recursive search might take too long).
    for source in rule.sources:
        try:
            Validation.path_exists(f"{source}/{self.__file}")
            return True
        except FileNotFoundError:
            pass

    return False
def __init__(self, conf: Conf):
    """
    Build an evaluator: validate the environment, load the training and
    test datasets, and register the classifiers to evaluate.

    :param conf: loaded configuration providing dataset paths
    :raise: errors raised by Validation when the Python version is too
        old or a dataset file is missing/unreadable
    """
    super().__init__()

    # validate python version
    Validation.python_version(
        Evaluator.REQUIRED_PYTHON,
        f"Unsupported Python version.\n"
        f"Required Python {Evaluator.REQUIRED_PYTHON[0]}.{Evaluator.REQUIRED_PYTHON[1]} or higher."
    )
    Validation.can_read(
        conf.dataset_train,
        f"Training set file *must* exists and be readable. "
        f"Current file: '{conf.dataset_train}'.\n"
        f"Training set path (fully qualified) can be specified in conf.ini file or using Conf object."
    )
    Validation.can_read(
        conf.dataset_test,
        f"Test set file *must* exists and be readable. "
        f"Current file: '{conf.dataset_test}'.\n"
        f"Test set path (fully qualified) can be specified in conf.ini file or using Conf object."
    )

    self.__LOG = LogManager.get_instance().logger(LogManager.Logger.EVAL)
    self.__conf = conf

    # using full dataset as training set
    self.__training = Set(pd.read_csv(self.conf.dataset_train))

    # load test set if it has the same format as training_set.csv;
    # as example file see ./res/dataset/test_set_no_index.csv
    self.__test = Set(pd.read_csv(self.conf.dataset_test))
    # Alternative loaders kept for other test-set layouts:
    # load test set if it has a header (F1-20 and CLASS row) and an index,
    # i.e. a test set saved using pd.to_csv('/path', index=True);
    # as example file see ./res/dataset/test_set_index.csv
    # self.__test = Set(pd.read_csv(self.conf.dataset_test, index_col=0))
    # load test set if it does not have a header row (no F1-20 and CLASS row)
    # and was not saved using pd.to_csv('/path', index=True), so it has no index;
    # as example file see ./res/dataset/test_set_no_index_features.csv
    # self.__test = Set(pd.read_csv(self.conf.dataset_test, header=None,
    #                               names=[f"F{i}" for i in range(1, 21)] + ["CLASS"]))

    # current classifiers used (commented-out entries are disabled)
    self.__classifiers = {
        Evaluator._MULTILAYER_PERCEPTRON: None,
        Evaluator._SUPPORT_VECTOR_MACHINE: None,
        # Evaluator._DECISION_TREE: None,
        Evaluator._RANDOM_FOREST: None,
        # Evaluator._KNEAREST_NEIGHBORS: None,
        # Evaluator._STOCHASTIC_GRADIENT_DESCENT: None,
        Evaluator._ADA_BOOST: None,
        # Evaluator._NAIVE_BAYES: None,
        # Evaluator._KMEANS: None
    }
def exists(self) -> bool:
    """Return True when the wrapped filename points at an existing path."""
    try:
        Validation.path_exists(self.__filename)
    except FileNotFoundError:
        return False
    return True
#
# Gufw is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Gufw; if not, see http://www.gnu.org/licenses for more
# information.

from util import Validation
from controller import Controller
from view.guiGufw import GuiGufw

if __name__ == "__main__":
    # Checks the config file and whether an instance is already running
    # (per the original comment) — behavior lives in Validation.
    app_instance = Validation()

    # Controller and the firewall it manages
    controller = Controller()
    firewall = controller.get_firewall()

    # Show GUI (blocks until the window is closed)
    app = GuiGufw(firewall)

    # Remove current instance before exiting
    app_instance.exit_application()
def __validate_rules(self) -> None:
    """
    Ensure formats, sources, destinations and rules are non-empty dicts.

    :raise TypeError: when one of the sections is not a dict
    :raise ValueError: when one of the sections is empty
    """
    config = self.__dispatcher_config

    # (section, message when missing/not a dict, message when empty) —
    # checked in the same order as the original explicit sequence.
    sections = (
        (config.dispatcher_formats,
         "Formats not specified",
         "Formats must be at least one"),
        (config.dispatcher_sources,
         "Source directories not specified",
         "Directories to observe must be at least one"),
        (config.dispatcher_destinations,
         "Destination directories not specified",
         "Destination directories must be at least one"),
        (config.dispatcher_rules,
         "Rules not specified",
         "Rules must be at least one"),
    )
    for section, missing_message, empty_message in sections:
        Validation.is_dict(section, missing_message)
        Validation.is_true(len(section) > 0, empty_message)
def __check_permissions(self):
    """
    Check permissions on directories before performing the operations,
    creating missing destination directories when possible.

    :raise ValueError if input directory is equal to output directory
    :raise NotADirectoryError
    :raise PermissionError
    :raise LinksError
    """
    sources = self.__dispatcher_config.dispatcher_sources
    destinations = self.__dispatcher_config.dispatcher_destinations

    # Every source must already exist and be readable and writable.
    for src_path in sources.values():
        Validation.is_dir(
            src_path,
            f"Missing input directory '{src_path}'"
        )
        Validation.can_read(
            src_path,
            f"Missing read permission on '{src_path}'"
        )
        Validation.can_write(
            src_path,
            f"Missing write permission on '{src_path}'"
        )

    for dst_path in destinations.values():
        try:
            Validation.is_dir_writeable(
                dst_path,
                f"Directory '{dst_path}' *must* exists and be writable"
            )
        except NotADirectoryError:
            # Destination missing: verify we may create it under its
            # parent, then create it (including intermediate dirs).
            parent_directory = Path(dst_path).parent
            Validation.can_write(
                parent_directory,
                f"Missing write permission on '{parent_directory}'"
            )
            FileObserver.__LOG.info(f"Creating missing destination directory '{dst_path}'")
            # create if not exists
            Path(dst_path).mkdir(parents=True, exist_ok=True)
        # A destination may never coincide with (or be a symlink of) a source.
        for src_path in sources.values():
            Validation.are_symlinks(
                src_path,
                dst_path,
                f"Input ('{src_path}') and output ('{dst_path}') directory can not be the same (or symlinks)"
            )