def load_plugins(self, path, excludes=None):
    """Find all Plugin subclasses under *path* (recursively) and register them.

    Each ``*.py`` file is imported via its dotted module path; every class in
    the module that subclasses ``Plugin`` (excluding ``Plugin`` itself) is
    instantiated and added to ``self.plugins`` unless its class name appears
    in *excludes*.

    :param path: directory to scan (a ``pathlib.Path``-like object with
        ``rglob``)
    :param excludes: optional collection of plugin class names to skip
        (default: none excluded)
    """
    # Avoid the shared-mutable-default pitfall: excludes=[] would be one
    # list object reused across calls.
    if excludes is None:
        excludes = []
    for pyfile in path.rglob('*.py'):
        # Strip the trailing ".py" and split into components so the file
        # can be imported by its dotted module path.
        splitted_path = self._os_path_split(str(pyfile)[:-3])
        module_path = '.'.join(splitted_path[:-1])
        package = __import__(module_path, fromlist=[splitted_path[-1]])
        module = getattr(package, splitted_path[-1])
        plugin_file = module.__name__
        for var_name in dir(module):
            attr = getattr(module, var_name)
            if not (inspect.isclass(attr) and attr is not Plugin
                    and issubclass(attr, Plugin)):
                continue
            # Take the name from the class so it is defined even when
            # instantiation fails (the original read it from the instance,
            # leaving it unbound/stale inside the except handler).
            plugin_name = attr.__name__
            try:
                plugin = attr()
            except Exception:
                # logging.warn is a deprecated alias of logging.warning.
                logging.warning("plugin %s not loaded (%s)",
                                plugin_name, plugin_file)
                continue
            if plugin_name not in excludes:
                self.plugins.add(plugin)
                logging.info("plugin %s loaded (%s)",
                             plugin_name, plugin_file)
            else:
                logging.info("plugin %s was excluded (%s)",
                             plugin_name, plugin_file)
def load_plugins(self, path, excludes=None):
    """Find all Plugin subclasses under *path* (recursively) and register them.

    Each ``*.py`` file is imported via its dotted module path; every class in
    the module that subclasses ``Plugin`` (excluding ``Plugin`` itself) is
    instantiated and added to ``self.plugins`` unless its class name appears
    in *excludes*.

    :param path: directory to scan (a ``pathlib.Path``-like object with
        ``rglob``)
    :param excludes: optional collection of plugin class names to skip
        (default: none excluded)
    """
    # excludes=[] as a default would be a single list shared across calls;
    # use None as the sentinel instead.
    if excludes is None:
        excludes = []
    for pyfile in path.rglob('*.py'):
        # Drop the ".py" suffix, split into components, and rebuild the
        # dotted module path for __import__.
        parts = _os_path_split(str(pyfile)[:-3])
        dotted = '.'.join(parts[:-1])
        package = __import__(dotted, fromlist=[parts[-1]])
        module = getattr(package, parts[-1])
        plugin_file = module.__name__
        for var_name in dir(module):
            attr = getattr(module, var_name)
            if not (inspect.isclass(attr) and attr is not Plugin
                    and issubclass(attr, Plugin)):
                continue
            # Class name is available even if instantiation raises; the
            # original read it from the instance, so the except handler
            # could see an unbound or stale plugin_name.
            plugin_name = attr.__name__
            try:
                plugin = attr()
            except Exception:
                # logging.warn is a deprecated alias of logging.warning.
                logging.warning("plugin %s not loaded (%s)",
                                plugin_name, plugin_file)
                continue
            if plugin_name not in excludes:
                self.plugins.add(plugin)
                logging.info("plugin %s loaded (%s)",
                             plugin_name, plugin_file)
            else:
                logging.info("plugin %s was excluded (%s)",
                             plugin_name, plugin_file)
def check_mtime(path, tstamp):
    """Raise RuntimeError if anything under *path* was modified after *tstamp*.

    Walks *path* recursively and compares each entry's st_mtime against the
    given timestamp (seconds since the epoch).
    """
    for entry in path.rglob("*"):
        entry_mtime = entry.stat().st_mtime
        if entry_mtime <= tstamp:
            continue
        raise RuntimeError(
            'File %s was changed after backup started (%f > %f)!'
            % (str(entry), entry_mtime, tstamp))
def match_directory(path, move_files=False, recursive=True):
    # type: (Path, bool, bool) -> None
    """
    Scans directory `path` for files known to QBittorrent and either adjusts
    the fastresume files to have the correct path, or moves the files to the
    path specified in the fastresume files.
    """
    qb = QBittorrentMeta()
    entries = path.rglob("*") if recursive else path.glob("*")
    for entry in entries:
        try:
            if move_files:
                if qb.move_single_file(entry):
                    logger.info("Moved file %s", entry)
                else:
                    logger.info("File %s already in destination", entry)
            else:
                if qb.single_file_moved(entry):
                    logger.info("Adjusted torrent path for %s", entry)
                else:
                    logger.info("Torrent path already correct for %s", entry)
        except NotFound:
            logger.debug("Did not find torrent file for %s", entry)
def get_set_of_all_files(path: Path):
    """Return the set of all regular files found recursively under *path*.

    Directories (and anything else that is not a file) are skipped.
    """
    all_files = set()
    for entry in path.rglob('*'):
        candidate = Path(entry)
        if candidate.is_file():
            all_files.add(candidate)
    return all_files
def from_path(cls, path: Path):
    """ Computes hash list for a given path into output gzipped file """
    path = Path(path).resolve()
    log.info(_("Generate hashlist from directory at {}").format(str(path)))
    entries = dict()
    for entry in path.rglob("*"):
        if entry.is_file():
            # Map the path relative to the root onto (hash, permissions).
            entries[entry.relative_to(path)] = (
                file_hash(entry),
                file_permissions_octal(entry),
            )
        elif not entry.is_dir():
            # Neither a regular file nor a directory (e.g. socket, fifo).
            log.warning(_("Ignoring non regular file {}").format(str(entry)))
    return cls(base_path=path, files=entries)
def _rglob_with_self(path: Path, pattern: str) -> Iterable[Path]: if fnmatch.fnmatch(path.name, pattern): yield path return yield from path.rglob(pattern)