def home():
    """Flask view: list a directory under ROOT_PATH; handle upload/delete POSTs.

    Security fix: 'path' (query string), 'root_path' and 'file_name' (form)
    are untrusted.  The original joined them straight into filesystem paths,
    allowing '../' traversal outside ROOT_PATH; every resolved path is now
    verified to stay inside ROOT_PATH before it is used.
    """
    web_path = request.args.get('path') or ''
    root = os.path.realpath(app.config['ROOT_PATH'])

    def _safe_join(*parts):
        # Resolve symlinks/'..' and reject anything escaping ROOT_PATH.
        candidate = os.path.realpath(os.path.join(root, *parts))
        if candidate != root and not candidate.startswith(root + os.sep):
            raise PermissionError('path escapes ROOT_PATH')
        return candidate

    if request.method == 'POST':
        if request.form['action'] == 'create':
            f = request.files['upfile']
            if f:
                fname = secure_filename(f.filename)
                f.save(_safe_join(web_path, fname))
        elif request.form['action'] == 'delete':
            fname = request.form['file_name']
            fpath = request.form['root_path']
            full_path = _safe_join(fpath, fname)
            if os.path.exists(full_path):
                os.remove(full_path)

    actual_path = _safe_join(web_path)
    path = Path(actual_path)
    dirs = {n.name: os.path.join(web_path, n.name)
            for n in path.iterdir() if n.is_dir()}
    files = {n.name: os.path.join(web_path, n.name)
             for n in path.iterdir() if n.is_file()}
    return render_template("home.html",
                           current_dir=web_path,
                           parent_dir=os.path.dirname(web_path),
                           dirs=dirs,
                           files=files)
def home():
    """Flask view: list a directory under ROOT_PATH; handle upload/delete POSTs.

    Security fix: 'path' (query string), 'root_path' and 'file_name' (form)
    are untrusted.  The original joined them straight into filesystem paths,
    allowing '../' traversal outside ROOT_PATH; every resolved path is now
    verified to stay inside ROOT_PATH before it is used.
    """
    web_path = request.args.get('path') or ''
    root = os.path.realpath(app.config['ROOT_PATH'])

    def _safe_join(*parts):
        # Resolve symlinks/'..' and reject anything escaping ROOT_PATH.
        candidate = os.path.realpath(os.path.join(root, *parts))
        if candidate != root and not candidate.startswith(root + os.sep):
            raise PermissionError('path escapes ROOT_PATH')
        return candidate

    if request.method == 'POST':
        if request.form['action'] == 'create':
            f = request.files['upfile']
            if f:
                fname = secure_filename(f.filename)
                f.save(_safe_join(web_path, fname))
        elif request.form['action'] == 'delete':
            fname = request.form['file_name']
            fpath = request.form['root_path']
            full_path = _safe_join(fpath, fname)
            if os.path.exists(full_path):
                os.remove(full_path)

    actual_path = _safe_join(web_path)
    path = Path(actual_path)
    dirs = {n.name: os.path.join(web_path, n.name)
            for n in path.iterdir() if n.is_dir()}
    files = {n.name: os.path.join(web_path, n.name)
             for n in path.iterdir() if n.is_file()}
    return render_template("home.html",
                           current_dir=web_path,
                           parent_dir=os.path.dirname(web_path),
                           dirs=dirs,
                           files=files)
def count_lines_and_files(paths_lines=None, paths_files=None,
                          line_glob=None, file_glob=None) -> dict:
    """Counts lines and files in the given paths.

    Returns a dict with one "lc_<stem>" entry per counted file and one
    "fc_<dir>" entry per counted (sub)directory.
    """
    result = {}

    # Line counts: a path may be a directory (optionally filtered by
    # line_glob), a single file, or missing (counted as nothing).
    for raw in arg_to_iter(paths_lines):
        target = Path(raw).resolve()
        if target.is_dir():
            candidates = target.glob(line_glob) if line_glob else target.iterdir()
        elif target.is_file():
            candidates = (target,)
        else:
            candidates = ()
        for candidate in candidates:
            LOGGER.info("Counting lines in <%s>...", candidate)
            stem = os.path.splitext(candidate.name)[0]
            result[f"lc_{stem}"] = count_lines(candidate)

    # File counts: walk every subdirectory; keys join the relative parts.
    for raw in arg_to_iter(paths_files):
        target = Path(raw).resolve()
        if not target.is_dir():
            continue
        for subdir in target.glob("**"):
            LOGGER.info("Counting files in <%s>...", subdir)
            if target == subdir:
                key = target.name
            else:
                key = "_".join(subdir.relative_to(target).parts)
            result[f"fc_{key}"] = count_files(subdir, glob=file_glob)

    return result
def worker() -> None:
    # Worker-thread body for a parallel directory scan.  Shared state from the
    # enclosing scope: `tasks` (count of directories not yet fully processed),
    # `paths` (work queue of directories), `output` (results list), `lock`,
    # and the condition variables `on_input` / `on_output` (presumably created
    # from `lock` — confirm in the enclosing function).
    nonlocal tasks
    while True:
        with lock:
            # Pull the next directory, or exit when all work is done.
            while True:
                if not tasks:
                    # No outstanding directories anywhere: signal consumers
                    # with a None sentinel and terminate this worker.
                    output.append(None)
                    on_output.notify()
                    return
                if not paths:
                    # Work exists but another worker holds it; wait for more
                    # paths (or for `tasks` to reach zero).
                    on_input.wait()
                    continue
                path = paths.pop()
                break
        try:
            for p in path.iterdir():
                if p.is_dir():
                    # Found a subdirectory: enqueue it and bump the
                    # outstanding-task count before notifying waiters.
                    with lock:
                        tasks += 1
                        paths.append(p)
                        on_input.notify()
                else:
                    # Regular entry: optionally map it through `func`.
                    item = func(p) if func is not None else p
                    with lock:
                        output.append(item)
                        on_output.notify()
        except Exception:
            # Best effort: log and keep going so one bad directory doesn't
            # kill the whole scan.
            log.exception("Error scanning directory %s", path)
        finally:
            with lock:
                # This directory is fully processed; wake everyone when the
                # overall count hits zero so idle workers can exit.
                tasks -= 1
                if not tasks:
                    on_input.notify_all()
def folder(raw_path, expandvars=False, exists=False, reset=False, touch=False):
    """Return *raw_path* as a pathlib.Path, with optional checks/side effects.

    Arguments:
        expandvars -- expand environment variables in the raw path first
        exists -- raise FileNotFoundError / NotADirectoryError if the path
                  is missing or is not a directory
        reset -- empty the folder (removing its children, not the folder
                 itself), creating it if it does not exist
        touch -- ensure the folder exists
    """
    if expandvars:
        raw_path = os.path.expandvars(raw_path)
    path = Path(raw_path)

    if exists:
        if not path.exists():
            raise FileNotFoundError(f'{raw_path} not found.')
        if not path.is_dir():
            raise NotADirectoryError(f'{raw_path} should be a folder.')

    if reset:
        if not path.exists():
            os.makedirs(path, exist_ok=True)
        else:
            # Remove children instead of the folder itself.
            for child in path.iterdir():
                if not child.is_dir():
                    child.unlink()
                    continue
                try:
                    shutil.rmtree(child)
                except OSError:
                    # Best effort: leave stubborn subfolders in place.
                    logging.warning(f'Cannot remove folder {child}.')

    if touch:
        os.makedirs(path, exist_ok=True)

    return path
def tree(path, dirs_only=False, max_depth=0, _depth=0):
    """Print a size-annotated recursive listing of *path*; return total size.

    Symlinks are reported with their target and count as size 0; directory
    sizes are the sum of their children.  `max_depth=0` means unlimited.
    """
    path = Path(path)
    lst = path.lstat()
    is_symlink = stat.S_ISLNK(lst.st_mode)
    # For symlinks, classify by the link itself; otherwise follow to the target.
    st = lst if is_symlink else path.stat()
    is_dir = stat.S_ISDIR(st.st_mode)

    if is_symlink:
        size = 0
    elif is_dir:
        size = sum(
            tree(child, dirs_only=dirs_only, max_depth=max_depth,
                 _depth=_depth + 1)
            for child in sorted(path.iterdir())
        )
    else:
        size = lst.st_size

    visible = is_dir or not dirs_only
    within_depth = not max_depth or _depth <= max_depth
    if visible and within_depth:
        label = str(path)
        if is_dir:
            label += os.path.sep
        if is_symlink:
            label += ' -> ' + os.readlink(str(path))
        print('%10s %s' % (format_size(size), label))
    return size
def _glob_resources( resource_path: pathlib.Path, subdir: str, ext: str, ) -> Iterable[str]: """Find resources with the given extension. Yields a resource name like "html/log.html" (as string). """ assert '*' not in ext, ext assert ext.startswith('.'), ext path = resource_path / subdir if isinstance(resource_path, pathlib.Path): for full_path in path.glob(f'*{ext}'): # . is contained in ext yield full_path.relative_to(resource_path).as_posix() else: # zipfile.Path or importlib_resources compat object # Unfortunately, we can't tell mypy about resource_path being of type # Union[pathlib.Path, zipfile.Path] because we set "python_version = 3.6" in # .mypy.ini, but the zipfiel stubs (correctly) only declare zipfile.Path with # Python 3.8... assert path.is_dir(), path # type: ignore[unreachable] for subpath in path.iterdir(): if subpath.name.endswith(ext): yield posixpath.join(subdir, subpath.name)
def is_dir_empty(path, /):
    """Return True if the directory *path* has no entries at all."""
    path = mk_Path(path)
    # Pulling a single entry is enough; no need to exhaust the iterator.
    return next(iter(path.iterdir()), None) is None
def render(path):
    """Render the page template with one index entry per subdirectory of *path*."""
    assert path.is_dir()
    subdirs = sorted(entry for entry in path.iterdir() if entry.is_dir())
    rendered = ''.join(render_index(subdir) for subdir in subdirs)
    return template.format(indexes=indent(text=rendered, by=3))
def iterdir(self):
    """Yield entries from all of ``self._paths``, deduplicated by file name.

    Earlier paths win: an entry whose name was already yielded from a
    previous path is skipped.
    """
    # Perf fix: use a set — list membership made the scan O(n^2).
    visited = set()
    for path in self._paths:
        for file in path.iterdir():
            if file.name in visited:
                continue
            visited.add(file.name)
            yield file
def _get_compile_commands(path, command_directory):
    """Recursively yield compile-command strings found under *path*."""
    for entry in path.iterdir():
        if entry.is_dir():
            yield from _get_compile_commands(entry, command_directory)
            continue
        if not entry.name.endswith('_compile_command'):
            continue
        command = _get_command(entry, command_directory)
        if command:
            yield command
def _rmdir_recursive(path: Path):
    """Best-effort recursive removal of *path* and everything under it.

    Errors are logged at debug level and swallowed (cleanup helper).
    """
    try:
        for child in path.iterdir():
            if child.is_file():
                child.unlink()
            else:
                # BUG FIX: the original recursed on `path` itself, looping
                # forever until RecursionError (silently caught below).
                # Recurse into the child directory instead.
                CodeGeneratorTests._rmdir_recursive(child)
        path.rmdir()
    except Exception as e:
        logger.debug(f"{repr(e)}: when removing dir: {path}")
def saveListPath(fw, path):
    """Walk *path* recursively, writing a tab-separated line per file to *fw*.

    Each line is: hash0 <TAB> hash1 <TAB> absolute-file-path.
    """
    for entry in path.iterdir():
        if entry.is_dir():
            saveListPath(fw, entry)
            continue
        hashes = caculateHash(str(entry))
        print("Writing...." + str(entry) + "\n")
        fw.write(hashes[0] + "\t" + hashes[1] + "\t" + str(entry.absolute()) + "\n")
    return
def load_notes(path=Path(config.DATA)):
    """Return Note objects for regular files in *path*, sorted by age.

    Files whose suffix is in config.IGNORE_EXTENSIONS are skipped.  When the
    data directory is missing, an error is printed and the process exits.
    NOTE(review): the default path is evaluated once at import time.
    """
    if not path.exists():
        echo_error('Your data directory does not exist!')
        click.echo('Please create a data directory.')
        click.echo('You can do this with "mkdir {}".'.format(config.DATA))
        exit(1)
    notes = [
        Note(entry)
        for entry in path.iterdir()
        if entry.is_file() and entry.suffix not in config.IGNORE_EXTENSIONS
    ]
    return sorted(notes, key=lambda note: note.age)
def list_files(
    path: Union[Path, str],
    extensions: Optional[List[str]] = None,
) -> List[Path]:
    """Return the sorted regular files in *path*, optionally suffix-filtered."""
    base = make_path(path)
    files = sorted(entry for entry in base.iterdir() if entry.is_file())
    if extensions:
        return [entry for entry in files if entry.suffix in extensions]
    return list(files)
def fileSearch(self, path: Path):
    """Goes through CORPUS, adds each file that needs to be indexed.

    Collects entries that live inside immediate subdirectories of *path*;
    non-directory entries directly under *path* are skipped.
    """
    filesToIndex = []
    for directory in path.iterdir():
        try:
            if directory.is_dir():
                filesToIndex.extend(directory.iterdir())
        except OSError:
            # Narrowed from a bare `except`: only filesystem errors
            # (permissions, concurrent removal) are expected here.
            print("Error while adding files to index")
    return filesToIndex
def walk(path):
    """Recursively yield files matching _FILE_INCLUDE_RE under *path*.

    Directories matching _DIR_EXCLUDE_RE are pruned, as are symlinked
    directories whose parent is not named "modules".
    """
    for entry in path.iterdir():
        if entry.is_dir():
            if entry.is_symlink() and entry.parent.name != "modules":
                continue
            if _DIR_EXCLUDE_RE.fullmatch(entry.name):
                continue
            yield from walk(entry)
        elif entry.is_file() and _FILE_INCLUDE_RE.fullmatch(entry.name):
            yield entry
def match_best_version(target_name, target_version, path):
    """Return the highest-versioned entry in *path* matching name/constraint,
    or None when nothing matches.

    Entries are compared by their version tuple (floats parsed from the
    dot-separated version string).
    """
    path = path if not isinstance(path, basestring) else pathlib.Path(path)
    # Consistency/robustness fix: the sibling implementation of this function
    # guards against a missing directory; without this, path.iterdir() raises.
    if not path.exists():
        return None
    matches = []
    for data_name in path.iterdir():
        name, version = split_data_name(data_name.parts[-1])
        if name == target_name and constraint_match(target_version, version):
            matches.append(
                (tuple(float(v) for v in version.split('.')), data_name))
    if matches:
        return pathlib.Path(max(matches)[1])
    else:
        return None
def match_best_version(target_name, target_version, path):
    """Return the highest-versioned entry in *path* matching name/constraint,
    or None when the directory is missing or nothing matches."""
    if isinstance(path, basestring):
        path = pathlib.Path(path)
    if not path.exists():
        return None
    candidates = []
    for entry in path.iterdir():
        name, version = split_data_name(entry.parts[-1])
        if name != target_name:
            continue
        if not constraint_match(target_version, version):
            continue
        version_key = tuple(float(part) for part in version.split('.'))
        candidates.append((version_key, entry))
    if not candidates:
        return None
    return pathlib.Path(max(candidates)[1])
def _get_compile_commands(path, command_directory):
    """
    Args:
      path: A directory pathlib.Path to look for _compile_command files under.
      command_directory: The directory commands are run from.

    Yields strings to stick in compile_commands.json.
    """
    for entry in path.iterdir():
        if entry.is_dir():
            yield from _get_compile_commands(entry, command_directory)
            continue
        if not entry.name.endswith("_compile_command"):
            continue
        command = _get_command(entry, command_directory)
        if command:
            yield command
def resolve_hooks(
    hooks: Iterable[str],
    hook_type: HookType,
) -> abc.Generator[pathlib.Path, None, None]:
    """Yield runnable hook paths.

    A hook may be a readable+executable file, or a directory whose
    readable+executable entries named "<hook_type>_*" are yielded.
    Anything else is logged as unusable.
    """
    # Hoisted out of the loops: the prefix only depends on hook_type.
    prefix = "%s_" % hook_type.name
    for hook in hooks:
        path = pathlib.Path(hook).expanduser().resolve(strict=False)
        if path.is_file() and os.access(path, os.R_OK | os.X_OK):
            yield path
        elif path.is_dir():
            for entry in path.iterdir():
                if not entry.is_file():
                    continue
                if not entry.name.startswith(prefix):
                    continue
                if os.access(entry, os.R_OK | os.X_OK):
                    yield entry
        else:
            log.warning("Not usable hook: %s", hook)
def main():
    """Interactively convert every readable image in a directory to WebP."""
    in_dp = input("input directory: ")
    out_dp = input("output directory [{}]: ".format(DEFAULT_OUT_DIR))
    out_dp = out_dp or DEFAULT_OUT_DIR
    os.mkdir(out_dp)

    src = pathlib.Path(in_dp)
    total = len(os.listdir(src))
    index = 0
    for entry in tqdm.tqdm(src.iterdir(), total=total):
        try:
            image = PIL.Image.open(entry)
        except OSError:
            # Not an image (or unreadable) — skip it.
            continue
        image.save(os.path.join(out_dp, "{}.webp".format(index)))
        index += 1
def upload(path):
    """POST every regular file in *path* as a module to the Screeps code API.

    Exits the process with an error message on any non-200 response;
    otherwise prints the API response to stderr.
    """
    filelist = {}
    for f in path.iterdir():
        if f.is_file():
            # Fix: the original used f.open().read(), leaking the file
            # handle; read_text() opens and closes the file for us.
            filelist[f.name] = f.read_text()
    document = {"branch": args.branch, "modules": filelist}
    r = requests.post(
        'https://screeps.com/api/user/code',
        auth=(login, password),
        json=document,
    )
    if r.status_code != 200:
        sys.exit("%s: HTTP error %s\n%s" % (sys.argv[0], r.status_code, r.json()))
    else:
        print(r.json(), file=sys.stderr)
def get_file_list(self, path):
    "List entries in `path` directory"
    # Two backends: plain pathlib, or Qt's QDir (which also shows hidden
    # entries and sorts dirs-first, locale-aware).
    if self.fs_engine == 'pathlib':
        entries = [
            entry for entry in path.iterdir()
            if self.filter != 'dirs' or entry.is_dir()
        ]
        return self.sort_paths(entries)
    if self.fs_engine == 'qt':
        qdir = QtCore.QDir(str(path))
        wanted = qdir.Dirs if self.filter == 'dirs' else qdir.AllEntries
        qdir.setFilter(qdir.NoDotAndDotDot | qdir.Hidden | wanted)
        names = qdir.entryList(
            sort=QtCore.QDir.DirsFirst | QtCore.QDir.LocaleAware)
        return [str(path / name) for name in names]
    # Unknown engine: mirror the original's implicit None result.
    return None
def _run_id_iterator(slug, limit):
    """Return run ids (valid-UUID directory names) under *slug*, newest first.

    Fix: ``Path(slug)`` never raises FileNotFoundError, so the original
    try/except was dead code and a missing directory crashed later in
    ``iterdir()``; check for the directory explicitly instead.
    A positive *limit* truncates the result.
    """
    path = Path(slug)
    if not path.is_dir():
        log.warn('pipeline slug dir %s not exists', slug)
        return []
    ids = [
        x.name for x in path.iterdir()
        if x.is_dir() and _is_valid_uuid(x.name)
    ]
    # Newest (most recently modified) runs first.
    ids = sorted(ids,
                 key=lambda x: os.path.getmtime(os.path.join(slug, x)),
                 reverse=True)
    if limit > 0:
        return ids[:limit]
    return ids
def load_notes(path=config.DATA_PATH):
    """
    Scans the given path and returns a list of notes which is sorted by
    the modification time. Any directory and the tagfile is ignored. The
    path argument has to be an instance of pathlib.Path.
    """
    if not path.exists():
        error('The directory {} does not exist!'.format(path))
        click.echo('Creating new directory {}.'.format(path))
        path = config.TRASH_PATH
        path.mkdir(parents=True)
        exit(1)
    notes = [
        Note(entry)
        for entry in path.iterdir()
        if entry.is_file()
        and entry.suffix not in config.IGNORE_EXTENSIONS
        and entry != Note.tagfile
    ]
    return sorted(notes, key=lambda note: note.age)
def cleanup(path, incl=None):
    # Recursively delete the contents of `path`: every file, and every
    # directory that ends up fully cleaned, is removed — except entries in the
    # module-level `mpoints` collection (presumably mount points; they and
    # their ancestors are left in place).  Returns True when everything under
    # `path` was removed.
    #
    # NOTE(review): `incl` restricts which direct children are considered but
    # is NOT forwarded to the recursive call — presumably the filter is meant
    # to apply only at the top level; confirm with callers.
    clean = True
    for sub in path.iterdir():
        if incl is not None:
            if sub not in incl:
                continue
        iclean = True
        if sub.is_dir():
            # A directory is only removable if its own cleanup succeeded.
            iclean = cleanup(sub)
        if sub not in mpoints:
            if iclean:
                if sub.is_dir():
                    sub.rmdir()
                else:
                    sub.unlink()
        else:
            # Mount point: keep it, and mark this level as not fully clean.
            iclean = False
        clean = clean and iclean
    return clean
def findResourcePaths(self, resourceName, preferredPath=''):
    """Return list of potential non-empty pathlib objects for the resource.

    List includes preferred, module and user option paths.
    Arguments:
        resourceName -- the typical name of the resource directory
        preferredPath -- add this as the second path if given
    """
    # use abspath() - pathlib's resolve() can be buggy with network drives
    modPath = pathlib.Path(os.path.abspath(sys.path[0]))
    if modPath.is_file():
        modPath = modPath.parent  # for frozen binary
    candidates = [modPath / '..' / resourceName, modPath / resourceName]
    if options.Options.basePath:
        basePath = pathlib.Path(options.Options.basePath)
        candidates.insert(0, basePath / resourceName)
    if preferredPath:
        candidates.insert(1, pathlib.Path(preferredPath))
    # Keep only existing, non-empty directories, normalized via abspath.
    return [pathlib.Path(os.path.abspath(str(candidate)))
            for candidate in candidates
            if candidate.is_dir() and list(candidate.iterdir())]
def _get_compile_commands(path, command_directory, autogen_headers=None):
    '''Traverse a path and returns parsed command JSON strings.

    Args:
      path: A directory pathlib.Path to look for _compile_command files under.
      command_directory: The directory commands are run from.

    Yields:
      strings to stick in compile_commands.json.
    '''
    if autogen_headers is None:
        autogen_headers = []
    for entry in path.iterdir():
        if entry.is_dir():
            yield from _get_compile_commands(
                entry, command_directory, autogen_headers)
            continue
        if not entry.name.endswith('_compile_command'):
            continue
        command = _get_command(entry, command_directory)
        if command:
            yield command
            # Emit an extra command per autogenerated header, when it differs.
            for header in autogen_headers:
                header_command = _get_header_command(command, header)
                if header_command != command:
                    yield header_command
def list_dirs(path: Union[Path, str]) -> List[Path]:
    """Return the subdirectories of *path*, sorted."""
    base = make_path(path)
    subdirs = (entry for entry in base.iterdir() if entry.is_dir())
    # sorted() already returns a list.
    return sorted(subdirs)
def count_files(path, glob=None) -> int:
    """Return the number of files in a given directory."""
    target = Path(path)
    # With a glob pattern, count matches; otherwise count all entries.
    entries = target.glob(glob) if glob else target.iterdir()
    # bools sum as 0/1, so this counts the regular files.
    return sum(entry.is_file() for entry in entries)
def AggregateByMean(path, outputFileName):
    """Run HandleMunicipalityDir on every subdirectory of *path*."""
    subdirs = (entry for entry in path.iterdir() if entry.is_dir())
    for subdir in subdirs:
        HandleMunicipalityDir(subdir, outputFileName)
def AggregateByThreshold(path, valueType):
    """Process each municipality subdirectory, then report MinCoverage."""
    for entry in path.iterdir():
        if not entry.is_dir():
            continue
        HandleMunicipalityDir(entry, valueType)
    print(f"MinCoverage: {MinCoverage}")
def AddThresholdValues(path, valueType, inputFileName, outputFileName):
    """Apply HandleMunicipalityDir with the given I/O file names to each
    municipality subdirectory of *path*."""
    subdirs = (entry for entry in path.iterdir() if entry.is_dir())
    for subdir in subdirs:
        HandleMunicipalityDir(subdir, valueType, inputFileName, outputFileName)