def genFileTree(widget, pathobj, expandAbovePathName=None):
    """
    Construct the file tree

    :param widget: Initial object is root TreeWidget
    :param pathobj: Root directory that contains files that show up in the file tree
    :param expandAbovePathName: Specifies path of a new file so directories can be expanded to reveal the file
    :return:
    """
    childrange = range(widget.childCount())
    for path in pathobj.iterdir():
        if str(path) in [widget.child(p).path for p in childrange]:  # check if tree item already exists
            for childind in childrange:
                if widget.child(childind).path == str(path):
                    if path.is_dir():
                        genFileTree(widget.child(childind), path, expandAbovePathName)
        else:  # otherwise make a new tree item.
            if path.parts[-1].split('.')[-1] == 'py':
                child = TreeItem()
                child.setText(0, str(path.parts[-1]))
                child.path = str(path)
                child.isdir = False
                widget.addChild(child)
                if expandAbovePathName is not None and path == Path(expandAbovePathName):
                    # expand directories containing a new file
                    expandAboveChild(widget)
            elif path.is_dir() and len(list(path.glob('**/*.py'))):
                child = TreeItem()
                child.setText(0, str(path.parts[-1]))
                child.path = str(path)
                widget.addChild(child)
                genFileTree(child, path, expandAbovePathName)
    widget.sortChildren(0, 0)
def find_branch():
    'returns a pair: branch string (needs to be stripped) and mode suffix.'
    if is_file(gd + '/rebase-merge/interactive'):
        return open(gd + '/rebase-merge/head-name').read(), '|REBASE-i'
    if is_dir(gd + '/rebase-merge'):
        return open(gd + '/rebase-merge/head-name').read(), '|REBASE-m'
    # determine suffix first.
    if is_dir(gd + '/rebase-apply'):
        if is_file(gd + '/rebase-apply/rebasing'):
            s = '|REBASE'
        elif is_file(gd + '/rebase-apply/applying'):
            s = '|AM'
        else:
            s = '|AM/REBASE'
    elif is_file(gd + '/MERGE_HEAD'):
        s = '|MERGE'
    elif is_file(gd + '/CHERRY_PICK_HEAD'):
        s = '|CHERRY-PICK'
    elif is_file(gd + '/BISECT_LOG'):
        s = '|BISECT'
    else:
        s = ''
    c, b = runCO('git symbolic-ref HEAD')
    if c == 0:
        return b, s
    # detached.
    c, b = runCO('git describe --contains --all HEAD')
    if c == 0:
        return b, s
    # last option.
    try:
        head_sha = open(gd + '/HEAD').read()[:8]
    except FileNotFoundError:
        head_sha = 'unknown'
    return '({})'.format(head_sha), s
def get_tree_size(path: Path) -> int:
    """Compute the total files size from a given folder *path*.

    Note: this function cannot be decorated with lru_cache().
    """
    size = 0
    try:
        path.is_dir()
    except OSError:
        log.warning(f"Error calling is_dir() on: {path!r}", exc_info=True)
        return size

    with os.scandir(path) as it:
        for entry in it:
            try:
                is_dir = entry.is_dir()
            except OSError:
                log.warning(f"Error calling is_dir() on: {entry.path!r}", exc_info=True)
                continue
            if is_dir:
                size += get_tree_size(Path(entry.path))
            elif entry.is_file():
                size += entry.stat().st_size
    return size
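For comparison, the same total can be computed with pathlib alone. The sketch below is a minimal, self-contained alternative (the name tree_size_rglob and the use of rglob are illustrative, not part of the example above); it trades the per-entry OSError handling of the scandir version for brevity.

from pathlib import Path

def tree_size_rglob(path: Path) -> int:
    # Sum the sizes of all regular files below *path*.
    # Unlike the scandir-based version above, a single unreadable
    # entry raises OSError instead of being skipped.
    return sum(p.stat().st_size for p in path.rglob("*") if p.is_file())

# e.g. tree_size_rglob(Path.home())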
def count_lines_and_files(
    paths_lines=None, paths_files=None, line_glob=None, file_glob=None
) -> dict:
    """Counts lines and files in the given paths."""
    result = {}

    for path in arg_to_iter(paths_lines):
        path = Path(path).resolve()
        if path.is_dir():
            files = path.glob(line_glob) if line_glob else path.iterdir()
        elif path.is_file():
            files = (path,)
        else:
            files = ()
        for file in files:
            LOGGER.info("Counting lines in <%s>...", file)
            name = os.path.splitext(file.name)[0]
            result[f"lc_{name}"] = count_lines(file)

    for path in arg_to_iter(paths_files):
        path = Path(path).resolve()
        if not path.is_dir():
            continue
        for subdir in path.glob("**"):
            LOGGER.info("Counting files in <%s>...", subdir)
            if path == subdir:
                name = path.name
            else:
                relative = subdir.relative_to(path)
                name = "_".join(relative.parts)
            result[f"fc_{name}"] = count_files(subdir, glob=file_glob)

    return result
def get_tree_list(
    path: Path, remote_ref: str
) -> Generator[Tuple[str, Path], None, None]:
    """Compute remote paths based on *remote_ref* from a given *path*.

    This is used in the Direct Transfer feature to upload a folder
    and all its contents.

    Each entry will yield a tuple (remote_path, local_path). This order is
    important as it will be used in get_tree_list_sorted() to retrieve the
    sorted list to process.

    Note: this function cannot be decorated with lru_cache().
    """
    try:
        path.is_dir()
    except OSError:
        log.warning(f"Error calling is_dir() on: {path!r}", exc_info=True)
        return

    # First, yield the folder itself
    yield remote_ref, path

    remote_ref += f"/{path.name}"

    # Then, yield its children
    with os.scandir(path) as it:
        for entry in it:
            try:
                is_dir = entry.is_dir()
            except OSError:
                log.warning(f"Error calling is_dir() on: {entry.path!r}", exc_info=True)
                continue
            if is_dir:
                yield from get_tree_list(Path(entry.path), remote_ref)
            elif entry.is_file():
                yield remote_ref, Path(entry.path)
def genFileTree(widget, pathobj, expandAbovePathName=None, onlyIncludeDirsWithPyFiles=False):
    """
    Construct the file tree

    :param widget: Initial object is root TreeWidget
    :param pathobj: Root directory that contains files that show up in the file tree
    :param expandAbovePathName: Specifies path of a new file so directories can be expanded to reveal the file
    :return:
    """
    childrange = range(widget.childCount())
    for path in pathobj.iterdir():
        if str(path) in [widget.child(p).path for p in childrange]:  # check if tree item already exists
            for childind in childrange:
                if widget.child(childind).path == str(path):
                    if path.is_dir():
                        genFileTree(widget.child(childind), path, expandAbovePathName)
        else:  # otherwise make a new tree item.
            if path.parts[-1].split('.')[-1] == 'py':
                child = TreeItem()
                child.setText(0, str(path.parts[-1]))
                child.path = str(path)
                child.isdir = False
                child.setFlags(QtCore.Qt.ItemIsEnabled
                               | QtCore.Qt.ItemIsDragEnabled
                               | QtCore.Qt.ItemIsSelectable)
                child.setIcon(0, QtGui.QIcon(":/openicon/icons/edit-shred.png"))
                widget.addChild(child)
                if expandAbovePathName is not None and path == expandAbovePathName:
                    # expand directories containing a new file
                    expandAboveChild(widget)
            elif (path.is_dir()
                  and not path.match('*/__*__*')
                  and (not onlyIncludeDirsWithPyFiles or len(list(path.glob('**/*.py'))))):
                child = TreeItem()
                child.setText(0, str(path.parts[-1]))
                child.path = str(path)
                child.setFlags(QtCore.Qt.ItemIsDropEnabled
                               | QtCore.Qt.ItemIsEnabled
                               | QtCore.Qt.ItemIsDragEnabled
                               | QtCore.Qt.ItemIsSelectable)
                child.setIcon(0, QtGui.QIcon(":/openicon/icons/document-open-5.png"))
                widget.addChild(child)
                genFileTree(child, path, expandAbovePathName)
    widget.sortChildren(0, 0)
def contents(top_path, include_files=True, include_hidden=False):
    '''
    Returns a list with the paths of the immediate contents of the top_path
    directory. If top_path is a file, returns an empty list.

    Keyword arguments:
    include_files  : whether to include files or only directories.
    include_hidden : whether to include hidden files and hidden directories.
    '''
    return [top_path + "/" + i for i in listdir(top_path)
            if ((is_dir(top_path + "/" + i) or include_files)
                and (not is_hidden(top_path + "/" + i) or include_hidden))] \
        if is_dir(top_path) else []
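The same listing can be expressed with pathlib, which handles path joining directly. The sketch below is an illustrative equivalent, not part of the original: the name contents_pathlib and the leading-dot test for "hidden" are assumptions.

from pathlib import Path

def contents_pathlib(top_path, include_files=True, include_hidden=False):
    # List the immediate children of top_path; return [] if it is not a directory.
    top = Path(top_path)
    if not top.is_dir():
        return []
    return [
        str(p) for p in top.iterdir()
        if (p.is_dir() or include_files)
        and (not p.name.startswith(".") or include_hidden)  # "hidden" == dot-prefixed (assumption)
    ]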
def _do_generate_directory(self, sources: Tuple[pathlib.Path, ...],
                           destination: pathlib.Path):
    destination_base = destination
    logger.info("Generating %s from %s", destination, ', '.join(map(str, sources)))
    for base, source in yield_all_sources(*sources):
        destination = destination_base / source.relative_to(base)
        if source.is_symlink():
            self._create_symlink(source, destination)
        elif source.is_dir():
            if destination.exists():
                logger.debug("Clearing directory %s", destination)
                # Clear destination directory contents
                if destination.is_symlink() or not destination.is_dir():
                    raise GeneratorError(
                        "Destination {} is not a directory".format(destination))
                for path in destination.iterdir():
                    if path.is_dir():
                        shutil.rmtree(str(path))
                    else:
                        path.unlink()
            else:
                logger.debug("Creating directory %s", destination)
                destination.mkdir(self.dir_mode, exist_ok=True)
            shutil.copystat(str(source), str(destination), follow_symlinks=False)
            self._setstat(destination, self.dir_mode)
        else:
            if source.suffix == self.TEMPLATE_SUFFIX:
                destination = destination.with_name(destination.stem)
                self._generate_template_to_file(base, source, destination)
            else:
                self._copy_to_file(source, destination)
def search_vcf(
    vcf_dir: Path,
    recursive: bool = False,
    exts: List[str] = (".vcf", ".vcf.gz"),
) -> Generator[str, str, None]:
    """
    Iterate over a directory and search for vcf files (or any file ending
    with the given extension list).

    Parameters:
        vcf_dir    Path       Path to the vcf directory in which to search
        recursive  bool       Whether to search recursively in
                              sub-directories (True) or not (False)
        exts       List[str]  A list of extensions used to identify vcf
                              (or any other file)

    Return:
        Generator[str, str, None]  A Generator of paths
    """
    for path in vcf_dir.iterdir():
        if path.is_dir():
            if recursive is True:
                yield from search_vcf(path, recursive, exts)
            else:
                continue

        if path.name.endswith(exts):
            yield path
def _handle_modified(path: pl.Path) -> None:
    if path.is_dir() or path.suffix != ".md":
        return

    _init_autofix_regexps()

    old_file_id = file_id(path)
    ctx = litprog.parse.parse_context([path])
    fixed_elements = list(_iter_fixed_elements(ctx))
    if fixed_elements == ctx.elements:
        return

    fixed_content = "".join(
        # line number prefix for debugging
        # "".join(f"{l.line_no:03d} " + l.val for l in elem.lines)
        "".join(l.val for l in elem.lines)
        for elem in fixed_elements
    )

    tmp_path = pl.Path(path.parent, path.name + ".tmp")
    with tmp_path.open(mode="w") as fh:
        fh.write(fixed_content)

    new_file_id = file_id(path)
    if old_file_id == new_file_id:
        # nothing changed -> we can update with the fix
        tmp_path.rename(path)
    else:
        tmp_path.unlink()

    print("updated", path)
def load_scripts(path):
    if not isinstance(path, pathlib.Path):
        path = str(path)
        path = os.path.expanduser(path)
        path = pathlib.Path(path)

    if path.is_dir():
        l = path.glob('*.py')
        # sort b.50.py before a.80.py
        l = sorted(l, key=lambda p: [p.suffixes[-2][1:] if len(p.suffixes) > 1 else "", p])
        for p in l:
            load_scripts(p)
    elif path.is_file():
        with path.open("rt") as f:
            c = f.read()
        c = compile(c, str(path), "exec")
        try:
            env.Env['__FILE__'] = str(path.absolute())
            exec(c, env.Env.dict)
            del env.Env['__FILE__']
        except NotImplementedError:
            # Script wanted to be ignored
            pass
        SCRIPTS.append(path)
    else:
        raise Exception("neither file nor dir in load_scripts", path)
def _notices_business(self, path):
    if path.is_dir():
        return

    noticeOK, linesFound = self._noticesEditor.check(path)
    if noticeOK:
        if self.verbose:
            if linesFound is None:
                print(f'No notice insertion for suffix "{path.suffix}",'
                      f' skipping "{path}"')
            else:
                print(f'Notice lines {linesFound} in "{path}"')
        return 0

    if self.verbose or not self.counting:
        print(f'No copyright notices in file "{path}"')
    if self.counting:
        return 1

    editedPath, differences = self._noticesEditor.insert(path)
    overwritten = self._ask_overwrite(editedPath, path, differences)
    if overwritten:
        noticeOK, linesFound = self._noticesEditor.check(path)
        if noticeOK:
            if self.verbose:
                print('Overwritten file OK.')
        else:
            raise RuntimeError("Overwritten file doesn't have notices.")

    return 1 if overwritten else 0
def mkdir(self, parents=False, exist_ok=False):
    """Make a directory at the location specified by |Path|.

    Args:
        parents (bool): If ``True``, any missing parents of this path
            are created as needed.
        exist_ok (bool): If ``True``, ``OSError`` is not raised if the
            target directory already exists.

    Raises:
        OSError: If ``parents`` is ``False`` and a parent is missing.
            If ``exist_ok`` is ``False`` and the target directory
            already exists.
    """
    if self.ext:
        path = self[:-1]
    else:
        path = self
    try:
        if parents:
            os.makedirs(str(path))
        else:
            os.mkdir(str(path))
    except OSError as exc:
        if exist_ok and (exc.errno == errno.EEXIST and path.is_dir()):
            pass
        else:
            raise exc
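The standard library now covers this pattern directly: pathlib.Path.mkdir accepts parents and exist_ok flags, so a wrapper like the one above is mainly needed for custom path types. A minimal sketch of the stock call (the path itself is illustrative):

from pathlib import Path

target = Path("build") / "output"            # illustrative path
target.mkdir(parents=True, exist_ok=True)    # create missing parents, ignore "already exists"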
def check_status(workspace_path_str):
    root_path = pathlib.Path(workspace_path_str)
    for path in root_path.iterdir():
        if path.is_dir():
            try:
                repo = git.Repo(path, search_parent_directories=True)
            except:
                logging.debug(
                    "**** path {0} was found to not be a git repo. ********".format(path))
                continue
            if repo:
                repo.git.fetch("--all")
                current_branch = repo.head.ref.name
                remote_branch = "origin/{0}".format(current_branch)

                commits_behind = [
                    c for c in repo.iter_commits('{0}..{1}'.format(current_branch, remote_branch))
                ]
                num_behind = len(commits_behind)
                commits_ahead = [
                    c for c in repo.iter_commits('{0}..{1}'.format(remote_branch, current_branch))
                ]
                num_ahead = len(commits_ahead)

                ahead_str = "\033[93m{} commits ahead remote\033[0m".format(num_ahead) if num_ahead > 0 else ""
                behind_str = "\033[93m{} commits behind remote\033[0m".format(num_behind) if num_behind > 0 else ""
                if num_ahead == 0 and num_behind == 0:
                    ahead_str = "\033[92mUp to date with remote\033[0m"

                logging.info(
                    "\033[94m{:<40}\033[0m on branch: {:<25} {} {}".format(
                        path.name, current_branch, ahead_str, behind_str))

                if repo.is_dirty():
                    changed_files = [item.a_path for item in repo.index.diff(None)]
                    logging.info("\tModified local files:")
                    for file in changed_files:
                        logging.info("\t\t\033[95m{}\033[0m".format(file))

                root_logger = logging.getLogger()
                if root_logger.isEnabledFor(logging.DEBUG) and num_ahead > 0:
                    for commit in commits_ahead:
                        logging.debug("\tCommit ahead: {} {} {}".format(
                            datetime.datetime.fromtimestamp(commit.committed_date),
                            commit.author, commit.message))
                if root_logger.isEnabledFor(logging.DEBUG) and num_behind > 0:
                    for commit in commits_behind:
                        logging.debug("\tCommit behind: {} {} {}".format(
                            datetime.datetime.fromtimestamp(commit.committed_date),
                            commit.author, commit.message))

                logging.info(" ")
        else:
            logging.debug("path {} is not a git repo.\n".format(path))
def main(url):
    if is_dir(join_path(self.path, url)):
        index_path = join_path(self.path, url, 'index')
        if file_exists(index_path + '.html', file_exists_param):
            print(join_path(self.path, url, 'index.html'))
            return self.fastread(join_path(self.path, url, 'index.html'))
        elif file_exists(index_path + '.htm', file_exists_param):
            return self.fastread(join_path(self.path, url, 'index.htm'))
        else:
            return self.error404()
    elif url[-5:] == '.html' or url[-4:] == '.htm':
        joined = join_path(self.path, url)
        if file_exists(joined, file_exists_param):
            return self.fastread(joined)
        else:
            return self.error404()
    else:
        joined = join_path(self.path, url)
        print(joined)
        if file_exists(joined, file_exists_param):
            return self.app.send_static_file(url)
        else:
            return self.error404()
def make_tree(cls, root, parent=None, is_last=False, criteria=None):
    """
    :param root:
    :param parent:
    :param is_last:
    :param criteria:
    """
    root = Path(str(root))
    criteria = criteria or cls._default_criteria

    displayable_root = cls(root, parent, is_last)
    yield displayable_root

    children = sorted(
        list(path for path in root.iterdir() if criteria(str(path))),
        key=lambda s: str(s).lower(),
    )
    count = 1
    for path in children:
        is_last = count == len(children)
        if path.is_dir():
            yield from cls.make_tree(
                path, parent=displayable_root, is_last=is_last, criteria=criteria
            )
        else:
            # print(path)
            yield cls(path, displayable_root, is_last)
        count += 1
def analyze(self, config):
    for include_path in config.include_paths:
        path = Path(include_path)

        if path.is_dir():
            self._lint_files(find_vim_script([path]))
        elif bool(re.match(VIM_SCRIPT_FILE_NAME_PATTERNS, path.name)):
            self._lint_file(path)
def checkout_same_branch_in_workspace(workspace_path_str, branch_name):
    root_path = pathlib.Path(workspace_path_str)
    if not root_path.exists():
        logging.warning(
            "Cannot pull changes because the workspace {} does not exist!".format(workspace_path_str))
        return
    for path in root_path.iterdir():
        if path.is_dir():
            try:
                repo = git.Repo(path, search_parent_directories=True)
            except:
                logging.debug(
                    "**** path {0} was found to not be a git repo. ********".format(path))
                continue
            if repo:
                repo.git.fetch("--all")
                remote_br_name = get_remote_branch_name(repo, branch_name)
                if remote_br_name:
                    repo.git.checkout(remote_br_name)
                    repo.git.pull()
                    logging.info(
                        "Checked out branch {} and pulled updates in the git repo {}".format(
                            branch_name, path))
                else:
                    logging.info(
                        "The git repo {} does not have a branch '{}'".format(path, branch_name))
def run(self):
    global CMAKE_BUILD_DIR
    self.jobs = multiprocessing.cpu_count()
    plat_specifier = '.{0}-{1}.{2}'.format(self.plat_name, *sys.version_info[:2])
    self.build_temp = os.path.join(self.build_base, 'temp' + plat_specifier, self.config)

    # if setup.py is directly called use CMake to build product
    if CMAKE_BUILD_DIR == '.':
        # set path to the root of OpenVINO CMakeList file
        openvino_root_dir = Path(__file__).resolve().parents[4]
        self.announce(f'Configuring cmake project: {openvino_root_dir}', level=3)
        self.spawn(['cmake', '-H' + str(openvino_root_dir), '-B' + self.build_temp,
                    '-DCMAKE_BUILD_TYPE={type}'.format(type=self.config),
                    '-DENABLE_PYTHON=ON',
                    '-DENABLE_OV_ONNX_FRONTEND=ON'])

        self.announce('Building binaries', level=3)
        self.spawn(['cmake', '--build', self.build_temp,
                    '--config', self.config, '-j', str(self.jobs)])
        CMAKE_BUILD_DIR = self.build_temp

    self.run_command('build_clib')
    build.run(self)

    # Copy extra package_data content filtered by find_packages
    dst = Path(self.build_lib)
    src = Path(get_package_dir(PY_INSTALL_CFG))
    exclude = ignore_patterns('*ez_setup*', '*__pycache__*', '*.egg-info*')
    for path in src.glob('**/*'):
        if path.is_dir() or exclude(str(path)):
            continue
        path_rel = path.relative_to(src)
        (dst / path_rel.parent).mkdir(exist_ok=True, parents=True)
        copyfile(path, dst / path_rel)
def folder(raw_path, expandvars=False, exists=False, reset=False, touch=False):
    if expandvars:
        raw_path = os.path.expandvars(raw_path)
    path = Path(raw_path)
    if exists:
        if not path.exists():
            raise FileNotFoundError(f'{raw_path} not found.')
        if not path.is_dir():
            raise NotADirectoryError(f'{raw_path} should be a folder.')
    if reset:
        if path.exists():
            # Remove children instead.
            for child in path.iterdir():
                if child.is_dir():
                    try:
                        shutil.rmtree(child)
                    except OSError:
                        logging.warning(f'Cannot remove folder {child}.')
                else:
                    child.unlink()
        else:
            os.makedirs(path, exist_ok=True)
    if touch:
        os.makedirs(path, exist_ok=True)
    return path
def compare_against_master_branch_in_workspace(root_path_str, branch_name, parent_branch_name):
    root_path = pathlib.Path(root_path_str)
    logging.getLogger("git").setLevel(logging.WARNING)
    logging.info(
        "{:<40} {:<20} {:<20} {:<20} {:<20} # Commits Ahead # Commits Behind".format(
            "Repository", "Master branch", "Date last commit", "Develop branch",
            "Date last commit", "#"))
    for path in root_path.iterdir():
        if path.is_dir():
            try:
                repo = git.Repo(path)
            except:
                logging.debug(
                    "**** path {0} was found to not be a git repo. ********".format(path))
                continue
            if repo:
                if repo.is_dirty() and not root_path.samefile(environment.PYTHON_WORKSPACE_PATH):
                    logging.warning(
                        "{} contains modifications and we do not want to overwrite them.".format(path))
                else:
                    compare_against_master_branch(path, repo, branch_name, parent_branch_name)
def _glob_resources(
    resource_path: pathlib.Path,
    subdir: str,
    ext: str,
) -> Iterable[str]:
    """Find resources with the given extension.

    Yields a resource name like "html/log.html" (as string).
    """
    assert '*' not in ext, ext
    assert ext.startswith('.'), ext
    path = resource_path / subdir

    if isinstance(resource_path, pathlib.Path):
        for full_path in path.glob(f'*{ext}'):  # . is contained in ext
            yield full_path.relative_to(resource_path).as_posix()
    else:  # zipfile.Path or importlib_resources compat object
        # Unfortunately, we can't tell mypy about resource_path being of type
        # Union[pathlib.Path, zipfile.Path] because we set "python_version = 3.6" in
        # .mypy.ini, but the zipfile stubs (correctly) only declare zipfile.Path with
        # Python 3.8...
        assert path.is_dir(), path  # type: ignore[unreachable]
        for subpath in path.iterdir():
            if subpath.name.endswith(ext):
                yield posixpath.join(subdir, subpath.name)
def path_dir(s: str):
    path = pathlib.Path(s).absolute()
    if not path.is_dir():
        raise argparse.ArgumentTypeError('%r does not exist or is not a directory' % s)
    return path
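A function like this is meant to be passed as an argparse type= converter. The short sketch below shows that wiring; the parser, argument name, and invocation are illustrative and assume path_dir from the example above is in scope.

import argparse

parser = argparse.ArgumentParser()
# argparse calls path_dir on the raw string and turns ArgumentTypeError
# into a normal "invalid value" error message.
parser.add_argument("src", type=path_dir, help="an existing directory")

args = parser.parse_args(["."])   # illustrative invocation
print(args.src.is_dir())          # True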
def remove(self):
    """
    Remove repository from file system.
    """
    if self.is_initialized() and not self.is_clean():
        raise LocallyModifiedError
    if self._path and is_dir(self._path):
        shutil.rmtree(self._path)
    _logger.info('The repository %s has been removed.', self._path)
def VerifyPath(name, path, isFileExpected):
    if not os.path.exists(path):
        raise Exception(f"{name} path does not exist: [{path}]")
    if (not isFileExpected) and not path.is_dir():
        raise Exception(f"{name} path is not a folder: [{path}]")
    if isFileExpected and not path.is_file():
        raise Exception(f"{name} path is not a file: [{path}]")
def collect_files(self, path):
    if path.is_dir():
        for root, dirs, files in os.walk(str(path)):
            for file_ in files:
                self._classify_file(pathlib.Path(root, file_))
    else:
        self._classify_file(path)
def filelist_from_patterns(patterns, ignore=None, base='.', sizesort=False):
    base = Path(base or '.').expanduser()
    filenames = set()
    for pattern in patterns or []:
        path = base / pattern
        if path.is_file():
            filenames.add(path)
            continue
        if path.is_dir():
            path /= '*'
        parts = path.parts[1:] if path.is_absolute() else path.parts
        pattern = str(Path("").joinpath(*parts))
        filenames.update(
            (p for p in Path(path.root).glob(pattern) if not p.is_dir()))
    filenames = list(filenames)

    def excluded(path):
        if any(path.match(ex) for ex in ignore):
            return True
        for part in path.parts:
            if any(Path(part).match(ex) for ex in ignore):
                return True

    if ignore:
        filenames = [path for path in filenames if not excluded(path)]
    if sizesort:
        filenames.sort(key=lambda f: f.stat().st_size)
    return filenames
def __init__(self, *paths):
    paths = list(OrderedDict.fromkeys(paths))  # remove duplicates
    self._paths = list(map(Path, paths))
    if not self._paths:
        message = 'MultiplexedPath must contain at least one path'
        raise FileNotFoundError(message)
    if any(not path.is_dir() for path in self._paths):
        raise NotADirectoryError('MultiplexedPath only supports directories')
def render(path):
    assert path.is_dir()
    index_dirs = sorted([f for f in path.iterdir() if f.is_dir()])
    indexes = indent(text=''.join(render_index(path) for path in index_dirs), by=3)
    return template.format(indexes=indexes)
def render_index(path):
    assert path.is_dir()
    return index_section_template.format(
        name=escape(path.name.capitalize()),
        entries='\n'.join(
            indent(lines=(render_index_entry(f) for f in sorted(
                path.glob('*.html'), key=lambda p: extract_numbers(p.stem))))))
def checkSubFolders(moduleDirectory, subDirectories):
    """Verify that subfolders exist in the module"""
    for subDirectory in subDirectories:
        path = Path(moduleDirectory) / Path(subDirectory)
        errMsg = f"Cannot handle the given folder {str(path)}"
        if not path.exists():
            raise Exception(errMsg + " because it does not exist.")
        if not path.is_dir():
            raise Exception(errMsg + " because it is not a directory.")
def is_dir(path):
    # type: (Path, ) -> Path
    """Checks if a path is an actual directory"""
    if not path.is_dir():
        msg = f"{path} is not a directory"
        raise ArgumentTypeError(msg)
    return path
def md5_update_from_dir(directory, hash):
    assert Path(directory).is_dir()
    for path in sorted(Path(directory).iterdir()):
        hash.update(path.name.encode())
        if path.is_file():
            hash = md5_update_from_file(path, hash)
        elif path.is_dir():
            hash = md5_update_from_dir(path, hash)
    return hash
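This example relies on a md5_update_from_file helper that is not shown. A plausible, self-contained sketch of it follows; the chunked reading and the 8192-byte chunk size are assumptions, not taken from the original.

import hashlib
from pathlib import Path

def md5_update_from_file(filename, hash):
    # Feed the file's bytes into the running hash in fixed-size chunks
    # so large files never need to fit in memory (chunk size is arbitrary).
    with open(filename, "rb") as f:
        for chunk in iter(lambda: f.read(8192), b""):
            hash.update(chunk)
    return hash

# e.g. md5_update_from_dir(Path("some_dir"), hashlib.md5()).hexdigest()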
def set_path_readonly(path: Path) -> None:
    if path.is_dir():
        # Need to add
        right = stat.S_IXUSR | stat.S_IRGRP | stat.S_IXGRP | stat.S_IRUSR
    else:
        # Already in read only
        right = stat.S_IRGRP | stat.S_IRUSR

    if path.stat().st_mode & ~right != 0:
        path.chmod(right)
def _collect_files(paths):
    result = set()
    for path in paths:
        if path.is_dir():
            dir_path = path
            result |= _collect_files(tuple(dir_path.iterdir()))
        else:
            file_path = path
            result.add(file_path)
    return result
def unset_path_readonly(path: Path) -> None:
    if path.is_dir():
        right = (
            stat.S_IXUSR
            | stat.S_IRGRP
            | stat.S_IXGRP
            | stat.S_IRUSR
            | stat.S_IWGRP
            | stat.S_IWUSR
        )
    else:
        right = stat.S_IRGRP | stat.S_IRUSR | stat.S_IWGRP | stat.S_IWUSR

    if path.stat().st_mode & right != right:
        path.chmod(right)
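A quick way to exercise this pair of helpers is against a throwaway file. The sketch below assumes set_path_readonly and unset_path_readonly from the two examples above are importable in the current scope; the temporary-file setup is illustrative.

import stat
import tempfile
from pathlib import Path

with tempfile.TemporaryDirectory() as tmp:
    target = Path(tmp) / "sample.txt"
    target.write_text("hello")

    set_path_readonly(target)     # assumes the helper above is in scope
    print(bool(target.stat().st_mode & stat.S_IWUSR))   # False: owner write bit cleared

    unset_path_readonly(target)
    print(bool(target.stat().st_mode & stat.S_IWUSR))   # True: write bit restored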
def addAllFiles(targetDir, sourceDir):
    # Get everything recursively.
    rootpath = Path(sourceDir)
    paths = list(rootpath.glob("**/*"))
    print(targetDir)
    print(sourceDir)
    for path in paths:
        rel = str(path.relative_to(sourceDir))
        if path.is_dir():
            ensureFolder(targetDir + slash + rel)
        else:
            ensureFolder((targetDir + slash + rel).rpartition(slash)[0])
            if not Path(targetDir + slash + rel).exists():
                # in dst, src order
                makeHardLink(targetDir + slash + rel, str(path.absolute()))
                addToLinkManifest(targetDir + slash + rel, str(path.absolute()))
def normalize_event_filename(filename: Union[str, Path], action: bool = True) -> Path:
    """
    Normalize a file name.

    :param unicode filename: The file name to normalize.
    :param bool action: Apply changes on the file system.
    :return Path: The normalized file name.
    """
    import unicodedata

    path = Path(filename)

    # NXDRIVE-688: Ensure the name is stripped for a file
    stripped = Path(str(path).strip())
    if all(
        [
            not WINDOWS,  # Windows does not allow files/folders ending with space(s)
            action,
            path != stripped,
            path.exists(),
            not path.is_dir(),
        ]
    ):
        # We can have folders ending with spaces
        log.info(f"Forcing space normalization: {path!r} -> {stripped!r}")
        path.rename(stripped)
        path = stripped

    # NXDRIVE-188: Normalize name on the file system, if needed
    normalized = Path(unicodedata.normalize("NFC", str(path)))
    normalized = normalized.with_name(safe_os_filename(normalized.name))

    if WINDOWS and path.exists():
        path = normalized_path(path).with_name(path.name)

    if not MAC and action and path != normalized and path.exists():
        log.info(f"Forcing normalization: {path!r} -> {normalized!r}")
        path.rename(normalized)

    return normalized
# gloss uses a single system installation directory for all files, to ease removal and upgrade.
# a custom installation directory can be specified as an argument to the installation scripts.
# please note that custom directories are not well tested.
install_prefix = '/usr/local'

# parse arguments.
if len(argv) > 2:
    exit('usage: optionally specify a custom installation prefix.')
if len(argv) == 2:
    install_prefix = argv[1]
    if ' ' in install_prefix:
        exit("installation prefix contains space.")

# determine the gloss source directory.
src_dir = abs_path(path_join(path_dir(argv[0]), '..'))
if not is_dir(src_dir):
    exit('bad source directory: ' + src_dir)

dst_dir = path_join(install_prefix, 'gloss')

uname = os_uname()[0].lower()
if uname == 'darwin':
    platform = 'mac'
elif uname == 'linux':
    with open('/etc/issue') as f:
        # get the first word from the issue string (e.g. 'Fedora')
        platform = f.readline().split()[0].lower()
else:
    platform = uname.lower()

errSL('src_dir:', src_dir)
errSL('dst_dir:', dst_dir)
#!/usr/bin/env python
# coding=utf-8
import os.path
import pathlib

# Check whether a given path is a file; this distinguishes files from directories
print(os.path.isfile('/etc/passwd'))        # True
print(os.path.isfile('/etc'))               # False
print(os.path.isfile('/does/not/exists'))   # False

# Check whether a given path exists; this does not distinguish files from directories
print(os.path.exists('/etc/passwd'))        # True
print(os.path.exists('/etc'))               # True
print(os.path.exists('/does/not/exists'))   # False

# On Python 2, pathlib must be installed separately; on Python 3 it is a built-in module
path = pathlib.Path("/etc/passwd")
print(path.exists())    # True
print(path.is_file())   # True
print(path.is_dir())    # False
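The checks above assume Unix paths such as /etc/passwd. A portable variant of the same comparison can be sketched with a temporary directory; the file name demo.txt is illustrative.

import os.path
import pathlib
import tempfile

with tempfile.TemporaryDirectory() as tmp:
    folder = pathlib.Path(tmp)
    file_path = folder / "demo.txt"
    file_path.write_text("hello")

    # os.path and pathlib agree on what is a file and what is a directory
    print(os.path.isfile(file_path), file_path.is_file())  # True True
    print(os.path.isdir(folder), folder.is_dir())          # True True
    print(file_path.is_dir(), folder.is_file())            # False False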