def capture_stderr_call(self):
    """
    Helper: emit CRITICAL and ERROR log records while stderr is captured,
    assert that something was written, and return the captured text.

    :return: str, everything the logger wrote to stderr during the calls.
    """
    old_stderr = sys.stderr
    listener = io.StringIO()
    sys.stderr = listener
    try:
        # try/finally guarantees stderr is restored even if Logger() or a
        # logging call raises; otherwise every later test would write into
        # the abandoned StringIO buffer.
        log = Logger().logger
        log.critical('SomeString')
        log.error('SomeString')
        out = listener.getvalue()
    finally:
        sys.stderr = old_stderr
    self.assertGreater(len(out), 0)
    return out
def test_stdout(self):
    """
    Levels WARNING, ERROR and CRITICAL must not appear on stdout: capture
    stdout, log at those levels, and assert the marker string is absent.
    """
    print('Levels WARNING, ERROR and CRITICAL not present in stdout')
    old_stdout = sys.stdout
    listener = io.StringIO()
    sys.stdout = listener
    try:
        # try/finally guarantees stdout is restored even if Logger() or a
        # logging call raises; otherwise later output would silently vanish
        # into the abandoned StringIO buffer.
        log = Logger().logger
        log.warning(self.not_present_string)
        log.error(self.not_present_string)
        log.critical(self.not_present_string)
        out = listener.getvalue()
    finally:
        sys.stdout = old_stdout
    self.assertNotIn(self.not_present_string, out)
class PostVEPProcessingValidator:
    """
    Validates that a VEP-processed datafile contains every feature the
    model's impute values expect.
    """

    def __init__(self, model):
        self.model = model
        self.log = Logger().logger

    def validate_features_present(self, datafile):
        """
        Validator to see if all features within the model impute values are
        presently processed.
        """
        utils = ColumnUtils()
        utils.set_specified_columns(self.model.impute_values.keys())
        missing = utils.get_missing_diff_with(datafile.columns)
        # Guard clause: nothing missing means nothing to report.
        if not missing:
            return
        error_message = ('Detected required feature(s) %s not '
                         'present within VEP processed input file!')
        missing_joined = ', '.join(missing)
        self.log.critical(error_message, missing_joined)
        raise KeyError(error_message % missing_joined)
class DynamicLoader:
    def __init__(self, required_attributes: list, path):
        """
        Dynamic Loader for both the imputer and preprocessor

        :param required_attributes: list, list containing all the required
            attributes the loaded modules have to have.
        :param path: Path-like, path to the potential modules.

        Use `load_impute_preprocess_modules()` to load the modules required
        for the imputer and preprocessor. Use `load_manual_annotators()` to
        load the manual VEP annotation processors.
        """
        self.log = Logger().logger
        self.path = path
        self._check_dir_exists()
        self.required_attributes = required_attributes
        # Mapping of module file path -> instantiated usable module class.
        self.modules = {}

    def load_manual_annotators(self):
        """
        Load the VEP annotation modules within path.

        :return: dict_values, all the usable VEP modules within path.
        :raises: FileNotFoundError, if no VEP annotation module is found
            within path.
        """
        self._load_modules()
        # Since the manual annotator doesn't require VEP version, GRCh build or
        # overwrite, this loading is done.
        return self.modules.values()

    def _load_modules(self, required_attributes=None):
        """
        Locate, import and filter the modules found within self.path,
        storing survivors in self.modules.

        :param required_attributes: optional list that overrides the
            instance-level required attributes for this load only.
        :raises: FileNotFoundError, if no module (or no module exposing all
            required attributes) is found within path.
        """
        self._check_dir_exists()
        set_required = required_attributes if required_attributes else self.required_attributes
        modules = self._load_modules_from_path(self.path)
        self._check_n_modules(modules)
        imported_modules = self._import(modules)
        for path, module in imported_modules.items():
            # Only keep modules that expose every required attribute.
            if all(item in dir(module) for item in set_required):
                self.modules[path] = module
        self._check_n_modules(self.modules)
        self.log.info('Successfully loaded %s modules.', len(self.modules))

    def _check_dir_exists(self):
        # Guard: path must exist before any directory listing is attempted.
        if not os.path.exists(self.path):
            error_message = "%s is not a path!"
            self.log.critical(error_message, self.path)
            raise OSError(error_message % self.path)

    def _check_n_modules(self, modules_dict):
        # At least one candidate module must remain after each filter step.
        if len(modules_dict) < 1:
            self._raise_no_module_found_error()

    def _raise_no_module_found_error(self):
        error_message = "No usable modules are found within %s!"
        self.log.critical(error_message, self.path)
        raise FileNotFoundError(error_message % self.path)

    @staticmethod
    def _load_modules_from_path(path):
        """
        Function to dynamically load in modules in the given path

        :param path: path to the modules
        :return: list, absolute paths of candidate .py files (dunder-suffixed
            files such as __init__.py, and abstract.py, are excluded)
        """
        modules = []
        for entry in os.listdir(path):
            full_path = os.path.join(path, entry)
            # endswith accepts a tuple: one call covers both exclusions.
            if full_path.endswith('.py') and not full_path.endswith(('__.py', 'abstract.py')):
                modules.append(full_path)
        return modules

    def _import(self, usable_modules: list):
        """
        Function to dynamically load in the modules using the import_module
        library.

        :param usable_modules: list of absolute paths to potential modules
        :return: dict, mapping module path -> instantiated usable class
            (the previous docstring claimed a list; a dict is returned)
        """
        return_modules = {}
        for module in usable_modules:
            # splitext keeps the whole stem even when the filename itself
            # contains '.py' (e.g. 'my.pylib.py' -> 'my.pylib'); the earlier
            # split('.py')[0] truncated such names to 'my'.
            name = os.path.splitext(os.path.basename(module))[0]
            spec = util.spec_from_file_location(name=name, location=module)
            loaded_module = self._process_spec(spec)
            if loaded_module and module not in return_modules:
                return_modules[module] = loaded_module
        return return_modules

    @staticmethod
    def _process_spec(spec):
        """
        Execute a module spec and return an instance of the last usable,
        non-template class it defines, or None when there is none.
        """
        return_spec = None
        loaded_spec = util.module_from_spec(spec)
        spec.loader.exec_module(loaded_spec)
        for attribute in dir(loaded_spec):
            if not attribute.startswith('Template') and not attribute.startswith('__'):
                get_attribute = getattr(loaded_spec, attribute)
                # A usable module class advertises 'name' and 'usable' and
                # reports usable is True once instantiated.
                if ('name' in dir(get_attribute) and
                        'usable' in dir(get_attribute) and
                        get_attribute().usable is True):
                    return_spec = get_attribute()
        return return_spec
class PostFileParseValidator:
    """Validates a freshly parsed input dataset before further processing."""

    def __init__(self):
        self.log = Logger().logger

    def validate_n_columns(self, dataset):
        """
        Validator to make sure that at least 4 columns are loaded
        (chr, pos, ref, alt). Does NOT check for the names of these columns!
        """
        too_narrow = isinstance(dataset, pd.Series) or dataset.shape[1] < 4
        if too_narrow:
            error_message = 'Loaded dataset does NOT have enough features! ' \
                            'Is there a header present that does not start ' \
                            'with ##?'
            self.log.critical(error_message)
            raise KeyError(error_message)

    def validate_variants_present(self, dataset):
        """
        Validator to make sure that there is at least one variant present.
        """
        n_variants = dataset.shape[0]
        if n_variants == 0:
            error_message = 'Loaded dataset does not contain variants!'
            self.log.critical(error_message)
            raise ValueError(error_message)

    def validate_minimally_required_columns(
            self, dataset, additional_required_features: list = None):
        """
        Validator for both predict and train to check if the very least
        columns are present (chr, pos, ref, alt) and additionally the
        additional required columns.
        """
        utils = ColumnUtils()
        utils.set_specified_columns([
            Column.chr.value,
            Column.pos.value,
            Column.ref.value,
            Column.alt.value,
        ])
        if additional_required_features is not None:
            utils.add_to_specified_columns(additional_required_features)
        missing = utils.get_missing_diff_with(dataset.columns)
        # Guard clause: all required columns accounted for.
        if len(missing) == 0:
            return
        if len(missing) > 1:
            error_message = 'Detected required columns %s not present within input dataset!'
        else:
            error_message = 'Detected required column %s not present within input dataset!'
        self.log.critical(error_message, ', '.join(missing))
        raise KeyError(error_message % ', '.join(missing))

    def validate_chrom_pos(self, dataset):
        """
        Function to check if all values of the columns Chr and Pos are
        present.
        """
        chrom_has_gap = dataset[Column.chr.value].isnull().values.any()
        if chrom_has_gap:
            error_message = 'Detected gap in Chromosome column! Please supply a valid dataset.'
            self.log.critical(error_message)
            raise ValueError(error_message)
        pos_has_gap = dataset[Column.pos.value].isnull().values.any()
        if pos_has_gap:
            error_message = 'Detected gap in Position column! Please supply a valid dataset.'
            self.log.critical(error_message)
            raise ValueError(error_message)