def test_isabs(self): self.assertIs(posixpath.isabs(""), False) self.assertIs(posixpath.isabs("/"), True) self.assertIs(posixpath.isabs("/foo"), True) self.assertIs(posixpath.isabs("/foo/bar"), True) self.assertIs(posixpath.isabs("foo/bar"), False) self.assertRaises(TypeError, posixpath.isabs)
def test_isabs(self): self.assertIs(posixpath.isabs(''), False) self.assertIs(posixpath.isabs('/'), True) self.assertIs(posixpath.isabs('/foo'), True) self.assertIs(posixpath.isabs('/foo/bar'), True) self.assertIs(posixpath.isabs('foo/bar'), False) self.assertIs(posixpath.isabs(b''), False) self.assertIs(posixpath.isabs(b'/'), True) self.assertIs(posixpath.isabs(b'/foo'), True) self.assertIs(posixpath.isabs(b'/foo/bar'), True) self.assertIs(posixpath.isabs(b'foo/bar'), False)
def test_isabs(self): self.assertIs(posixpath.isabs(""), False) self.assertIs(posixpath.isabs("/"), True) self.assertIs(posixpath.isabs("/foo"), True) self.assertIs(posixpath.isabs("/foo/bar"), True) self.assertIs(posixpath.isabs("foo/bar"), False) self.assertIs(posixpath.isabs(b""), False) self.assertIs(posixpath.isabs(b"/"), True) self.assertIs(posixpath.isabs(b"/foo"), True) self.assertIs(posixpath.isabs(b"/foo/bar"), True) self.assertIs(posixpath.isabs(b"foo/bar"), False)
def _is_current_platform_abspath(path):
    """
    Check if the path is an absolute path for the current platform.

    :param str path: Path to validate.
    :returns bool: True if absolute for this platform, False otherwise.
    """
    if is_windows():
        # ntpath likes to consider a path starting with / to be absolute,
        # but it is not!
        return ntpath.isabs(path) and not posixpath.isabs(path)
    else:
        return posixpath.isabs(path)
def _is_current_platform_abspath(path):
    """
    Check if the path is an absolute path for the current platform.

    :param str path: Path to validate.
    :returns bool: True if absolute for this platform, False otherwise.
    """
    # Windows is detected via sys.platform rather than os.name here.
    if sys.platform == "win32":
        # ntpath likes to consider a path starting with / to be absolute,
        # but it is not!
        return ntpath.isabs(path) and not posixpath.isabs(path)
    else:
        return posixpath.isabs(path)
def write_readback(self, data, name, options, read_options=None):
    """Round-trip *data* through a temporary HDF5 file and return what was read.

    The HDF5 path name is randomly coerced to one of several path types
    (str, bytes, or pathlib flavors) independently for the write and the
    read, exercising hdf5storage's path-type handling.
    """
    # Randomly convert the name to other path types.
    path_choices = (str, bytes, pathlib.PurePath, pathlib.PurePosixPath,
                    pathlib.PureWindowsPath, pathlib.Path)
    name_type_w = random.choice(path_choices)
    name_type_r = random.choice(path_choices)
    # Name to write with.
    if name_type_w == bytes:
        name_w = name.encode('utf-8')
    elif name_type_w in (pathlib.PurePath, pathlib.PurePosixPath,
                         pathlib.PosixPath):
        # NOTE(review): this tuple tests pathlib.PosixPath, which is not in
        # path_choices, while pathlib.Path (which is) falls through to the
        # slicing branch below — confirm this asymmetry is intended.
        name_w = name_type_w(name)
    elif name_type_w != str:
        # bool isabs() is used as an int slice start to drop a leading '/'.
        name_w = name_type_w(name[posixpath.isabs(name):])
    else:
        name_w = name
    # Name to read with.
    if name_type_r == bytes:
        name_r = name.encode('utf-8')
    elif name_type_r in (pathlib.PurePath, pathlib.PurePosixPath,
                         pathlib.PosixPath):
        name_r = name_type_r(name)
    elif name_type_r != str:
        name_r = name_type_r(name[posixpath.isabs(name):])
    else:
        name_r = name
    # Write the data to the proper file with the given name, read it
    # back, and return the result. The file needs to be deleted
    # after to keep junk from building up. Different options can be
    # used for reading the data back.
    f = None
    try:
        f = tempfile.mkstemp()
        os.close(f[0])
        filename = f[1]
        hdf5storage.write(data, path=name_w, filename=filename,
                          options=options)
        out = hdf5storage.read(path=name_r, filename=filename,
                               options=read_options)
    except:
        # Bare re-raise: the finally clause below still removes the file.
        raise
    finally:
        if f is not None:
            os.remove(f[1])
    return out
def __setup(self): """Construct the series of shell commands, i.e., fill in self.__commands""" # Get source self.__commands.append( self.download_step(recursive=self.__recursive, wd=self.__wd)) # directory containing the unarchived package if self.__directory: if posixpath.isabs(self.__directory): self.src_directory = self.__directory else: self.src_directory = posixpath.join(self.__wd, self.__directory) # Build if self.__build: self.__commands.append('cd {}'.format(self.src_directory)) self.__commands.extend(self.__build) # Install if self.__install: if self.__prefix: self.__commands.append('mkdir -p {}'.format(self.__prefix)) self.__commands.append('cd {}'.format(self.src_directory)) self.__commands.extend(self.__install) # Cleanup remove = [self.src_directory] if self.url: remove.append( posixpath.join(self.__wd, posixpath.basename(self.url))) self.__commands.append(self.cleanup_step(items=remove))
def dispatch(self, request, path="", **kwargs):
    """Normalizes and splits the path and stores it as tuple in ``self.path``."""
    # Anchor relative paths at the root before normalizing.
    if not posixpath.isabs(path):
        path = f"/{path}"
    normalized = posixpath.normpath(path)
    # Keep only non-empty segments; "/" therefore yields an empty tuple.
    self.path = tuple(segment for segment in normalized.split("/") if segment)
    return super().dispatch(request, **kwargs)
def is_existing_path(self, path, check=os.path.exists, require_source_folder=False, discovered_path_ok=True):
    """Decide whether *path* plausibly exists for this package.

    Consults recorded CMake condition checks, the real source tree, and the
    sets of generated/devel/install files. *check* selects the filesystem
    predicate (exists vs. isdir).
    """
    # A prior if(EXISTS ...) / if(IS_DIRECTORY ...) guard counts as proof.
    if self.condition_is_checked("EXISTS %s" % path) or (check == os.path.isdir and self.condition_is_checked("IS_DIRECTORY %s" % path)):
        return True
    tmp = path.replace(os.path.sep, "/")
    if tmp.startswith(PathConstants.PACKAGE_SOURCE):
        # Strip the symbolic source prefix and test the real source tree.
        tmp = path[len(PathConstants.PACKAGE_SOURCE) + 1:]
        if check(os.path.normpath(os.path.join(self.path, self.subdir, tmp))):
            return True
    # Re-anchor the path at the current CMake source dir for the checks below.
    tmp = posixpath.normpath(posixpath.join(self.var["CMAKE_CURRENT_SOURCE_DIR"], path.replace(os.path.sep, "/")))
    if tmp.startswith(PathConstants.PACKAGE_SOURCE):
        if not require_source_folder and not posixpath.isabs(path) and tmp[len(PathConstants.PACKAGE_SOURCE) + 1:] in self.generated_files:
            return True
        if not require_source_folder and tmp in self.generated_files:
            return True
        return check(os.path.join(self.path, os.path.normpath(tmp[len(PathConstants.PACKAGE_SOURCE) + 1:])))
    if not require_source_folder and tmp.startswith(PathConstants.PACKAGE_BINARY):
        # Build-tree paths only exist if something generates them.
        return tmp[len(PathConstants.PACKAGE_BINARY) + 1:] in self.generated_files
    if not require_source_folder and tmp in self.generated_files:
        return True
    if not require_source_folder and tmp.startswith(PathConstants.CATKIN_DEVEL):
        # Standard devel-space layout directories are assumed to exist.
        s = tmp[len(PathConstants.CATKIN_DEVEL) + 1:]
        for t in ["include", "lib", "share", "bin"]:
            if s.startswith(t):
                return True
    if not require_source_folder and tmp.startswith(PathConstants.CATKIN_INSTALL):
        s = tmp[len(PathConstants.CATKIN_INSTALL) + 1:]
        for t in ["include", "lib", "share", "bin"]:
            if s.startswith(t):
                return True
    return tmp.startswith(PathConstants.DISCOVERED_PATH) and discovered_path_ok
def validate_abspath(self, value: str) -> None:
    """Raise ValidationError when *value* is absolute on Windows or POSIX."""
    is_absolute = ntpath.isabs(value) or posixpath.isabs(value)
    if is_absolute:
        raise ValidationError(
            description="found an absolute path ({}), expected a filename".format(value),
            platform=self.platform,
            reason=ErrorReason.FOUND_ABS_PATH,
        )
def on_final(info):
    """Run end-of-analysis lint checks on exported packages, includes and libs."""
    # Exported packages must also be declared as export dependencies.
    for pkg in info.export_packages - info.export_dep:
        if info.env.is_known_pkg(pkg):
            if pkg == "message_runtime":
                # message_runtime is acceptable as an exec/run dependency.
                if pkg not in info.exec_dep:
                    info.report(ERROR, "MISSING_DEPEND", pkg=pkg, type="run" if info.manifest.package_format < 2 else "exec", file_location=("package.xml", 0))
            else:
                info.report(ERROR, "MISSING_DEPEND", pkg=pkg, type="run" if info.manifest.package_format < 2 else "build_export", file_location=("package.xml", 0))
    # Message-like catkin packages used at build+export time should be exported.
    for pkg in (info.find_packages & info.build_dep & info.export_dep) - info.export_packages:
        if re.search(r"_(msg|message)s?(_|$)", pkg) and info.env.is_catkin_pkg(pkg):
            info.report(WARNING, "SUGGEST_CATKIN_DEPEND", pkg=pkg, file_location=info.location_of("catkin_package"))
    if info.export_includes and info.libraries and not info.export_libs:
        info.report(WARNING, "MISSING_EXPORT_LIB", file_location=info.location_of("catkin_package"))
    if info.executables or info.libraries:
        # Exported include dirs that nothing in the build uses are suspicious.
        for incl in info.export_includes - info.build_includes:
            info.report(WARNING, "UNUSED_INCLUDE_PATH", path=incl, file_location=info.location_of("catkin_package"))
    for incl in info.export_includes:
        if not info.is_existing_path(incl, check=os.path.isdir, require_source_folder=True):
            info.report(ERROR, "MISSING_INCLUDE_PATH", path=incl, file_location=info.location_of("catkin_package"))
    # Relative include paths nested under another include path are ambiguous.
    includes = info.build_includes | info.export_includes
    for d1 in includes:
        if not posixpath.isabs(d1):
            for d2 in includes:
                if d1.startswith("%s/" % d2):
                    info.report(WARNING, "AMBIGUOUS_INCLUDE_PATH", path=info.report_path(d1), parent_path=info.report_path(d2))
    for lib in info.export_libs:
        if lib in info.targets:
            # Exported library targets must keep their own name as output.
            if info.target_outputs[lib] != lib:
                info.report(ERROR, "EXPORT_LIB_RENAMED", target=lib, file_location=info.location_of("catkin_package"))
        if lib in info.executables:
            info.report(ERROR, "EXPORT_LIB_NOT_LIB", target=lib, file_location=info.location_of("catkin_package"))
def _subdirectory(self, info, args):
    """Handle add_subdirectory(): validate the subdir and recurse into it."""
    _, args = cmake_argparse(args, {"EXCLUDE_FROM_ALL": "-"})
    subdir = info.source_relative_path(args[0])
    real_subdir = info.real_path(subdir)
    # Reject subdirectories outside, missing from, or already visited in
    # the package source tree.
    if posixpath.isabs(subdir):
        info.report(ERROR, "EXTERNAL_SUBDIR", subdir=subdir)
        return
    if not os.path.isdir(real_subdir):
        info.report(ERROR, "MISSING_SUBDIR", subdir=subdir)
        return
    if subdir in info.subdirs:
        info.report(ERROR, "DUPLICATE_SUBDIR", subdir=subdir)
        return
    info.subdirs.add(subdir)
    # Save state, then give the subdirectory its own variable scope
    # (shallow copies, mirroring CMake's directory-scoped variables).
    old_subdir = info.subdir
    old_parent_var = info.parent_var
    old_find_packages = info.find_packages
    info.parent_var = info.var
    info.var = copy(info.var)
    info.find_packages = copy(info.find_packages)
    try:
        info.var["CMAKE_CURRENT_SOURCE_DIR"] = posixpath.join(
            PathConstants.PACKAGE_SOURCE, subdir)
        info.var["CMAKE_CURRENT_BINARY_DIR"] = posixpath.join(
            PathConstants.PACKAGE_BINARY, subdir)
        info.generated_files.add(subdir)
        info.subdir = subdir
        self._parse_file(info, os.path.join(real_subdir, "CMakeLists.txt"))
    finally:
        # Always restore the enclosing directory scope, even on parse errors.
        info.var = info.parent_var
        info.parent_var = old_parent_var
        info.subdir = old_subdir
        info.find_packages = old_find_packages
def __init__(self, path, root=Root.builddir, destdir=None, directory=None):
    """Build a path anchored at *root*.

    :param path: the path string (may be absolute, or relative to *root*)
    :param root: a Root/InstallRoot enum value or another BasePath to join onto
    :param destdir: whether the path is staged under DESTDIR; only valid for
        absolute/install roots
    :param directory: force directory-ness; False rejects directory paths
    """
    if destdir and isinstance(root, Root) and root != Root.absolute:
        raise ValueError('destdir only applies to absolute or install ' +
                         'paths')
    drive, normpath, isdir = self.__normalize(path, expand_user=True)
    if directory is False and isdir:
        raise ValueError('expected a non-directory path')
    if posixpath.isabs(normpath):
        # Absolute strings override whatever root the caller passed.
        root = Root.absolute
    elif root == Root.absolute:
        raise ValueError("'{}' is not absolute".format(path))
    elif isinstance(root, BasePath):
        # Joining onto another path inherits its root and destdir setting.
        normpath, isdir = self.__join(root.suffix, path)
        if destdir is None:
            destdir = root.destdir
        root = root.root
    if not isinstance(root, (Root, InstallRoot)):
        raise ValueError('invalid root {!r}'.format(root))
    # A normalized path beginning with '..' would point above its root.
    if (normpath == posixpath.pardir or
            normpath.startswith(posixpath.pardir + posixpath.sep)):
        raise ValueError("too many '..': path cannot escape root")
    self.suffix = drive + normpath
    self.root = root
    # Empty suffix means the root itself, which is a directory.
    self.directory = directory or isdir or normpath == ''
    self.destdir = bool(destdir)
def Extract(self):
    """Extract the tarfile to the current directory."""
    # NOTE(review): this is Python 2 code (xrange, dict.items() treated as a
    # list) — confirm before running under Python 3.
    if self.verbose:
        sys.stdout.write('|' + ('-' * 48) + '|\n')
        sys.stdout.flush()
        dots_outputted = 0
    win32_symlinks = {}
    for m in self.tar:
        if self.verbose:
            # Emit progress dots proportional to bytes consumed (50 wide).
            cnt = self.read_file.tell()
            curdots = cnt * 50 / self.read_filesize
            if dots_outputted < curdots:
                for dot in xrange(dots_outputted, curdots):
                    sys.stdout.write('.')
                sys.stdout.flush()
                dots_outputted = curdots
        # For hardlinks in Windows, we try to use mklink, and instead copy on
        # failure.
        if m.islnk() and sys.platform == 'win32':
            CreateWin32Link(m.name, m.linkname, self.verbose)
        # On Windows we treat symlinks as if they were hard links.
        # Proper Windows symlinks supported by everything can be made with
        # mklink, but only by an Administrator.  The older toolchains are
        # built with Cygwin, so they could use Cygwin-style symlinks; but
        # newer toolchains do not use Cygwin, and nothing else on the system
        # understands Cygwin-style symlinks, so avoid them.
        elif m.issym() and sys.platform == 'win32':
            # For a hard link, the link target (m.linkname) always appears
            # in the archive before the link itself (m.name), so the links
            # can just be made on the fly.  However, a symlink might well
            # appear in the archive before its target file, so there would
            # not yet be any file to hard-link to.  Hence, we have to collect
            # all the symlinks and create them in dependency order at the end.
            linkname = m.linkname
            if not posixpath.isabs(linkname):
                # Symlink targets are relative to the symlink's directory.
                linkname = posixpath.join(posixpath.dirname(m.name), linkname)
            linkname = posixpath.normpath(linkname)
            win32_symlinks[posixpath.normpath(m.name)] = linkname
        # Otherwise, extract normally.
        else:
            self.tar.extract(m)
    # Create the collected "symlinks" (as hard links) in dependency order.
    # NOTE(review): a cycle among symlinks would loop forever here.
    win32_symlinks_left = win32_symlinks.items()
    while win32_symlinks_left:
        this_symlink = win32_symlinks_left.pop(0)
        name, linkname = this_symlink
        if linkname in win32_symlinks:
            # The target is itself a symlink not yet created.
            # Wait for it to come 'round on the guitar.
            win32_symlinks_left.append(this_symlink)
        else:
            del win32_symlinks[name]
            CreateWin32Link(name, linkname, self.verbose)
    if self.verbose:
        sys.stdout.write('\n')
        sys.stdout.flush()
def read_file(filename, binary=False):
    """Get the contents of a file contained with qutebrowser.

    Args:
        filename: The filename to open as string.
        binary: Whether to return a binary string.
                If False, the data is UTF-8-decoded.

    Return:
        The file contents as string.
    """
    # Resource names must be relative and must not climb out of the package.
    assert not posixpath.isabs(filename), filename
    assert os.path.pardir not in filename.split(posixpath.sep), filename
    # Only decoded (text) resources are cached.
    if not binary and filename in _resource_cache:
        return _resource_cache[filename]
    if hasattr(sys, 'frozen'):
        # PyInstaller doesn't support pkg_resources :(
        # https://github.com/pyinstaller/pyinstaller/wiki/FAQ#misc
        fn = os.path.join(os.path.dirname(sys.executable), filename)
        if binary:
            with open(fn, 'rb') as f:
                return f.read()
        else:
            with open(fn, 'r', encoding='utf-8') as f:
                return f.read()
    else:
        data = pkg_resources.resource_string(qutebrowser.__name__, filename)
        if not binary:
            data = data.decode('UTF-8')
        return data
def _abspath(root, value): # not all variables are paths: only absolutize if it looks like a relative path if root and \ (value.startswith('./') or \ ('/' in value and not (posixpath.isabs(value) or ntpath.isabs(value)))): value = os.path.join(root, value) return value
def _subdirectory(self, info, args):
    """Handle add_subdirectory(): validate the subdir and recurse into it."""
    _, args = cmake_argparse(args, {"EXCLUDE_FROM_ALL": "-"})
    subdir = info.source_relative_path(args[0])
    real_subdir = info.real_path(subdir)
    # Reject subdirectories outside, missing from, or already visited in
    # the package source tree.
    if posixpath.isabs(subdir):
        info.report(ERROR, "EXTERNAL_SUBDIR", subdir=subdir)
        return
    if not os.path.isdir(real_subdir):
        info.report(ERROR, "MISSING_SUBDIR", subdir=subdir)
        return
    if subdir in info.subdirs:
        info.report(ERROR, "DUPLICATE_SUBDIR", subdir=subdir)
        return
    info.subdirs.add(subdir)
    # Save state, then give the subdirectory its own variable scope
    # (shallow copies, mirroring CMake's directory-scoped variables).
    old_subdir = info.subdir
    old_parent_var = info.parent_var
    old_find_packages = info.find_packages
    info.parent_var = info.var
    info.var = copy(info.var)
    info.find_packages = copy(info.find_packages)
    try:
        info.var["CMAKE_CURRENT_SOURCE_DIR"] = posixpath.join(PathConstants.PACKAGE_SOURCE, subdir)
        info.var["CMAKE_CURRENT_BINARY_DIR"] = posixpath.join(PathConstants.PACKAGE_BINARY, subdir)
        info.generated_files.add(subdir)
        info.subdir = subdir
        self._parse_file(info, os.path.join(real_subdir, "CMakeLists.txt"))
    finally:
        # Always restore the enclosing directory scope, even on parse errors.
        info.var = info.parent_var
        info.parent_var = old_parent_var
        info.subdir = old_subdir
        info.find_packages = old_find_packages
def resolve_perl_module_req(apt_mgr, req):
    """Map a Perl module requirement onto candidate filesystem paths.

    When no explicit @INC list is given, the module (or relative filename)
    is searched for under the standard system Perl directories using regex
    path patterns; an absolute filename or explicit @INC disables regex
    matching.
    """
    DEFAULT_PERL_PATHS = [
        "/usr/share/perl5",
        "/usr/lib/.*/perl5/.*",
        "/usr/lib/.*/perl-base",
        "/usr/lib/.*/perl/[^/]+",
        "/usr/share/perl/[^/]+"
    ]
    if req.inc is None:
        if req.filename is None:
            # Foo::Bar -> Foo/Bar.pm under each default path (regex-escaped).
            paths = [
                posixpath.join(
                    inc, re.escape(req.module.replace('::', '/') + '.pm'))
                for inc in DEFAULT_PERL_PATHS
            ]
            regex = True
        elif not posixpath.isabs(req.filename):
            paths = [
                posixpath.join(inc, re.escape(req.filename))
                for inc in DEFAULT_PERL_PATHS
            ]
            regex = True
        else:
            # Absolute filenames are looked up literally.
            paths = [req.filename]
            regex = False
    else:
        regex = False
        paths = [posixpath.join(inc, req.filename) for inc in req.inc]
    return find_reqs_simple(apt_mgr, paths, regex=regex)
def git_url(self) -> str:
    """Build an ssh:// git URL from host plus optional username, port and root_dir."""
    user_part = f"{self.username}@" if self.username else ""
    port_part = f":{self.port}" if self.port is not None else ""
    path_part = f"{self.root_dir}" if self.root_dir else ""
    # Relative root dirs are addressed relative to the remote home (~).
    if path_part and not posixpath.isabs(path_part):
        path_part = f"/~/{path_part}"
    return f"ssh://{user_part}{self.host}{port_part}{path_part}"
def un_posix(valid_posix_path, drive=None):
    """Convert an absolute POSIX-style path to a platform-local path.

    On POSIX systems the path is returned unchanged; elsewhere the owning
    drive is looked up and prepended.
    """
    if os.name == "posix":
        return valid_posix_path
    else:
        # NOTE(review): `drives` is declared global but never used in this
        # body; `drive` parameter is immediately overwritten below — confirm.
        global drives
        if not posixpath.isabs(valid_posix_path):
            return valid_posix_path# what to do? for now assert
        # NOTE(review): this assert can never fire — the branch above
        # already returned for non-absolute paths.
        assert posixpath.isabs(valid_posix_path), "un_posix() needs an absolute posix style path, not %s" % valid_posix_path
        #drive = get_drive_by_hash(valid_posix_path)
        drive = get_drive(valid_posix_path)
        assert drive, "We cannot make this path (%s) local to the platform without knowing the drive" % valid_posix_path
        path = systempath.join(drive, systempath.normpath(valid_posix_path))
        return path
def translate_path(self, path):
    """Map a URL path onto the filesystem below the server's root directory."""
    # Strip the query string and fragment before touching the path itself.
    for separator in ("?", "#"):
        path = path.split(separator, 1)[0]
    path = posixpath.normpath(path)
    assert posixpath.isabs(path)
    relative = posixpath.relpath(path, "/")
    return os.path.join(self.get_root_dir(), relative)
def _include_file(self, info, args):
    """Handle include(): locate, optionally parse, and report on the file."""
    opts, args = cmake_argparse(args, {"OPTIONAL": "-", "RESULT_VARIABLE": "?", "NO_POLICY_SCOPE": "-"})
    if not args:
        return
    # A bare name with no '/' or '.' is a CMake module name, not a file path.
    if "/" not in args[0] and "." not in args[0]:
        incl_file = "NOTFOUND"
    else:
        incl_file = info.source_relative_path(args[0])
        # Files from discovered (external) locations are out of scope.
        if incl_file.startswith(PathConstants.DISCOVERED_PATH):
            return
    # Some packages have includes we deliberately do not recurse into.
    skip_parsing = False
    if info.manifest.name in self._include_blacklist:
        for glob_pattern in self._include_blacklist[info.manifest.name]:
            if fnmatch(incl_file, glob_pattern):
                skip_parsing = True
                break
    real_file = os.path.join(info.path, os.path.normpath(incl_file))
    if os.path.isfile(real_file):
        if not skip_parsing:
            self._parse_file(info, real_file)
    else:
        if not opts["OPTIONAL"]:
            if posixpath.isabs(incl_file):
                info.report(ERROR, "EXTERNAL_FILE", cmd="include", file=args[0])
            else:
                info.report(ERROR, "MISSING_FILE", cmd="include", file=incl_file)
        incl_file = "NOTFOUND"
    # Mirror CMake: expose the resolved file (or NOTFOUND) via the variable.
    if opts["RESULT_VARIABLE"]:
        info.var[opts["RESULT_VARIABLE"]] = incl_file
def __init__(self, datastoreRootUri: Union[ButlerURI, str], path: str):
    """Store a datastore root URI plus a path relative to it.

    :param datastoreRootUri: absolute root of the datastore (str is coerced
        to a ButlerURI directory).
    :param path: location inside the datastore; must be relative.
    :raises ValueError: for a non-ButlerURI root, a relative root URI, or an
        absolute *path*.
    """
    if isinstance(datastoreRootUri, str):
        datastoreRootUri = ButlerURI(datastoreRootUri, forceDirectory=True)
    elif not isinstance(datastoreRootUri, ButlerURI):
        raise ValueError("Datastore root must be a ButlerURI instance")
    if not posixpath.isabs(datastoreRootUri.path):
        raise ValueError(
            f"Supplied URI must be an absolute path (given {datastoreRootUri})."
        )
    self._datastoreRootUri = datastoreRootUri
    # file:// URIs follow local OS path rules; everything else is POSIX.
    pathModule: types.ModuleType
    if self._datastoreRootUri.scheme == "file":
        pathModule = os.path
    else:
        pathModule = posixpath
    # mypy can not work out that these modules support isabs
    if pathModule.isabs(path):  # type: ignore
        raise ValueError(
            "Path within datastore must be relative not absolute")
    self._path = path
def normalize_path(path):
    """Return a root-anchored, normalized POSIX path, dropping index.html leaves."""
    normalized = posixize_path(osp.normpath(path))
    # Anchor relative paths at the site root.
    if not osp.isabs(normalized):
        normalized = osp.join('/', normalized)
    # Directory URLs are represented without the trailing index.html.
    if osp.basename(normalized) == "index.html":
        return osp.dirname(normalized)
    return normalized
def get_top_srcdir(self, file):
    """Returns a normalized top_srcdir for the given file: if
    substs['top_srcdir'] is a relative path, it is relative to the
    topobjdir. Adjust it to be relative to the file path."""
    top_srcdir = self.substs["top_srcdir"]
    # An absolute path (POSIX or Windows style) needs no adjustment.
    if not (posixpath.isabs(top_srcdir) or ntpath.isabs(top_srcdir)):
        top_srcdir = posixpath.normpath(
            posixpath.join(self.get_depth(file), top_srcdir))
    return top_srcdir
def _is_absolute_path(self, path): if posixpath.isabs(path): return True if ntpath.isabs(path): return True return False
def _resolve_dir(sftp, dst_dir): dst_home = sftp.normalize('.') dst_dir = dst_dir.split(':')[-1].replace('\\', '/') if dst_dir == '.': dst_dir = dst_home + '/' if not posixpath.isabs(dst_dir): dst_dir = posixpath.join(dst_home, dst_dir) return dst_dir
def resolve(self, hdfs_path):
    """Return absolute, normalized path, with special markers expanded.

    :param hdfs_path: Remote path.

    Currently supported markers:

    * `'#LATEST'`: this marker gets expanded to the most recently updated file
      or folder. They can be combined using the `'{N}'` suffix. For example,
      `'foo/#LATEST{2}'` is equivalent to `'foo/#LATEST/#LATEST'`.
    """
    path = hdfs_path
    if not psp.isabs(path):
        # Lazily discover (and cache) the absolute root from the home dir.
        if not self.root or not psp.isabs(self.root):
            root = self._get_home_directory('/').json()['Path']
            self.root = psp.join(root, self.root) if self.root else root
            _logger.debug('Updated root to %r.', self.root)
        path = psp.join(self.root, path)
    path = psp.normpath(path)

    def expand_latest(match):
        """Substitute #LATEST marker."""
        prefix = match.string[:match.start()]
        suffix = ''
        n = match.group(1)  # n as in {N} syntax
        for _ in repeat(None, int(n) if n else 1):
            statuses = self._list_status(psp.join(prefix, suffix)).json()
            # Sort by newest modification time first (hence the negation).
            candidates = sorted([
                (-status['modificationTime'], status['pathSuffix'])
                for status in statuses['FileStatuses']['FileStatus']
            ])
            if not candidates:
                raise HdfsError('Cannot expand #LATEST. %r is empty.', prefix)
            elif len(candidates) == 1 and candidates[0][1] == '':
                # A lone empty pathSuffix means the prefix is a file.
                raise HdfsError('Cannot expand #LATEST. %r is a file.', prefix)
            suffix = psp.join(suffix, candidates[0][1])
        return '/' + suffix

    path = re.sub(r'/?#LATEST(?:{(\d+)})?(?=/|$)', expand_latest, path)
    # #LATEST expansion (could cache the pattern, but not worth it)
    _logger.debug('Resolved path %r to %r.', hdfs_path, path)
    # Percent-encode everything except '/' and '='.
    return quote(path, '/=')
def construct_asset_path(self, asset_path, css_path, output_filename, variant=None):
    """Return a rewritten asset URL for a stylesheet"""
    # Resolve the asset against the stylesheet's directory (POSIX separators).
    css_dir = os.path.dirname(css_path).replace('\\', '/')
    public_path = self.absolute_path(asset_path, css_dir)
    if self.embeddable(public_path, variant):
        return "__EMBED__%s" % public_path
    if posixpath.isabs(asset_path):
        # Absolute URLs are passed through untouched.
        return asset_path
    return self.relative_path(public_path, output_filename)
def resolve(self, hdfs_path):
    """Return absolute, normalized path, with special markers expanded.

    :param hdfs_path: Remote path.

    Currently supported markers:

    * `'#LATEST'`: this marker gets expanded to the most recently updated file
      or folder. They can be combined using the `'{N}'` suffix. For example,
      `'foo/#LATEST{2}'` is equivalent to `'foo/#LATEST/#LATEST'`.
    """
    path = hdfs_path
    if not psp.isabs(path):
        # Lazily discover (and cache) the absolute root from the home dir.
        if not self.root or not psp.isabs(self.root):
            root = self._get_home_directory("/").json()["Path"]
            self.root = psp.join(root, self.root) if self.root else root
            _logger.debug("Updated root to %r.", self.root)
        path = psp.join(self.root, path)
    path = psp.normpath(path)

    def expand_latest(match):
        """Substitute #LATEST marker."""
        prefix = match.string[: match.start()]
        suffix = ""
        n = match.group(1)  # n as in {N} syntax
        for _ in repeat(None, int(n) if n else 1):
            statuses = self._list_status(psp.join(prefix, suffix)).json()
            # Sort by newest modification time first (hence the negation).
            candidates = sorted(
                [
                    (-status["modificationTime"], status["pathSuffix"])
                    for status in statuses["FileStatuses"]["FileStatus"]
                ]
            )
            if not candidates:
                raise HdfsError("Cannot expand #LATEST. %r is empty.", prefix)
            elif len(candidates) == 1 and candidates[0][1] == "":
                # A lone empty pathSuffix means the prefix is a file.
                raise HdfsError("Cannot expand #LATEST. %r is a file.", prefix)
            suffix = psp.join(suffix, candidates[0][1])
        return "/" + suffix

    path = re.sub(r"/?#LATEST(?:{(\d+)})?(?=/|$)", expand_latest, path)
    # #LATEST expansion (could cache the pattern, but not worth it)
    _logger.debug("Resolved path %r to %r.", hdfs_path, path)
    return path
def get_top_srcdir(self, file):
    '''Returns a normalized top_srcdir for the given file: if
    substs['top_srcdir'] is a relative path, it is relative to the
    topobjdir. Adjust it to be relative to the file path.'''
    base = self.substs['top_srcdir']
    # Absolute in either POSIX or Windows terms means no adjustment needed.
    already_absolute = posixpath.isabs(base) or ntpath.isabs(base)
    if already_absolute:
        return base
    return posixpath.normpath(posixpath.join(self.get_depth(file), base))
def __init__(
    self,
    path: Union[str, Path, "ZipPath"],
    *,
    mode: str = "r",
    at: str = "",  # pylint: disable=invalid-name
    allow_zip64: bool = True,
    compression: int = zipfile.ZIP_DEFLATED,
    compresslevel: Optional[int] = None,
    name_to_info: Optional[Dict[str, zipfile.ZipInfo]] = None,
    info_order: Sequence[str] = (),
):
    """Initialise a zip path item.

    :param path: the path to the zip file, or another instance of a ZipPath
    :param at: the path within the zipfile (always use posixpath `/`
        separators)
    :param mode: the mode with which to open the zipfile, either read 'r',
        write 'w', exclusive create 'x', or append 'a'

    write only options:

    :param allow_zip64: if True, the ZipFile will create files with ZIP64
        extensions when needed
    :param compression: compression type ``zipfile.ZIP_STORED`` (no
        compression), ``zipfile.ZIP_DEFLATED`` (requires zlib),
        ``zipfile.ZIP_BZIP2`` (requires bz2) or ``zipfile.ZIP_LZMA``
        (requires lzma)
    :param name_to_info: The dictionary for storing mappings of
        filename -> ``ZipInfo``, if ``None``, defaults to ``{}``.
        This can be used to implement on-disk storage of the zip central
        directory
    :param info_order: ``ZipInfo`` for these file names will be written first
        to the zip central directory. These allows for faster reading of key
        files, in a zip that contains many 1000s of files (see
        ``FilteredZipInfo``).
    """
    # 'at' must stay a relative, non-escaping path inside the archive.
    if posixpath.isabs(at):
        raise ValueError(f"'at' cannot be an absolute path: {at}")
    assert not any(p == ".." for p in at.split(
        posixpath.sep)), "'at' should not contain any '..'"
    # Note ``zipfile.ZipInfo.filename`` of directories always end `/`
    # but we store without, to e.g. correctly compute parent/file names
    self._at = at.rstrip("/")
    if isinstance(path, (str, Path)):
        self._filepath = Path(path)
        self._zipfile = ZipFileExtra(
            path,
            mode=mode,
            compression=compression,
            compresslevel=compresslevel,
            allowZip64=allow_zip64,
            name_to_info=name_to_info,
            info_order=info_order,
        )
    else:
        # Cloning from another ZipPath shares its open zipfile handle.
        self._filepath = path._filepath
        self._zipfile = path._zipfile
def am_add_srcdir(path, am, prefix=""):
    """Return *prefix* + "$(srcdir)/" + *path* for plain relative paths.

    Paths that are empty, start with a make variable reference ('$'), or are
    already absolute yield "" (same contract as before; the empty-path case
    previously raised IndexError on path[0]).

    :param path: candidate source-relative path.
    :param am: unused; kept for interface compatibility with callers.
    :param prefix: string prepended to the rewritten path.
    """
    # Guard against empty input before inspecting the first character.
    if not path or path.startswith('$') or posixpath.isabs(path):
        return ""
    return prefix + "$(srcdir)/" + path
def validate_path(path, ctx=None):
    """Report (via *ctx*) when *path* is empty, absolute, or contains . / .. parts."""
    ctx = ctx or validation.Context.raise_on_error(prefix='Invalid path: ')
    if not path:
        ctx.error('not specified')
        return
    if posixpath.isabs(path):
        ctx.error('must not be absolute: %s', path)
    components = path.split(posixpath.sep)
    if any(component in ('.', '..') for component in components):
        ctx.error('must not contain ".." or "." components: %s', path)
def get_archive(self, path):
    """Get a file or directory from the container and make it into an
    `Archive` object."""
    # Resolve relative paths against the container's working directory.
    if self.working_dir is not None and not posixpath.isabs(path):
        path = posixpath.join(self.working_dir, path)
    stream, _stat = self.client.get_archive(self.container_id, path)
    return Archive('r', stream.read())
def _make_absolute(self, inpath):
    """Makes the given path absolute if it's not already. It is assumed
    that the path is relative to self._homedir"""
    # NOTE(review): Python 2 style urlparse module usage; also the docstring
    # mentions self._homedir but the code resolves against
    # get_whoops("/").home() — confirm which is authoritative.
    (scheme, netloc, path, query, fragment) = urlparse.urlsplit(inpath)
    if scheme or posixpath.isabs(path):
        # if a scheme is specified, assume it's absolute.
        return path
    return posixpath.join(get_whoops("/").home(), path)
def _is_abs(path): """ Check if path is absolute on any platform. :param str path: Path to validate. :returns bool: True is absolute on any platform, False otherwise. """ return posixpath.isabs(path) or ntpath.isabs(path)
def validate_path(path, ctx=None):
    """Report (via *ctx*) when *path* is empty, absolute, or contains . / .. parts."""
    ctx = ctx or validation.Context.raise_on_error(prefix="Invalid path: ")
    if not path:
        ctx.error("not specified")
        return
    if posixpath.isabs(path):
        ctx.error("must not be absolute: %s", path)
    has_dot_component = any(
        component in (".", "..") for component in path.split(posixpath.sep)
    )
    if has_dot_component:
        ctx.error('must not contain ".." or "." components: %s', path)
def resolve_binary_req(apt_mgr, req):
    """Map a binary-name requirement onto candidate filesystem paths."""
    name = req.binary_name
    if posixpath.isabs(name):
        candidates = [name]
    else:
        # Bare names are searched in the conventional binary directories.
        candidates = [
            posixpath.join(dirname, name) for dirname in ["/usr/bin", "/bin"]
        ]
    return find_reqs_simple(apt_mgr, candidates)
def resolve(self, hdfs_path):
    """Return absolute, normalized path, with special markers expanded.

    :param hdfs_path: Remote path.

    Currently supported markers:

    * `'#LATEST'`: this marker gets expanded to the most recently updated file
      or folder. They can be combined using the `'{N}'` suffix. For example,
      `'foo/#LATEST{2}'` is equivalent to `'foo/#LATEST/#LATEST'`.
    """
    path = hdfs_path
    if not posixpath.isabs(path):
        # Unlike other variants, this one refuses to guess a missing root.
        if not self.root:
            raise HdfsError('Path %r is relative but no root found.', path)
        if not posixpath.isabs(self.root):
            raise HdfsError('Non-absolute root found: %r', self.root)
        path = posixpath.join(self.root, path)
    path = posixpath.normpath(path)

    def expand_latest(match):
        """Substitute #LATEST marker."""
        prefix = match.string[:match.start()]
        suffix = ''
        n = match.group(1)  # n as in {N} syntax
        for _ in repeat(None, int(n) if n else 1):
            statuses = self._list_status(posixpath.join(prefix, suffix)).json()
            # Sort by newest modification time first (hence the negation).
            candidates = sorted([
                (-status['modificationTime'], status['pathSuffix'])
                for status in statuses['FileStatuses']['FileStatus']
            ])
            if not candidates:
                raise HdfsError('Cannot expand #LATEST. %r is empty.', prefix)
            elif len(candidates) == 1 and candidates[0][1] == '':
                # A lone empty pathSuffix means the prefix is a file.
                raise HdfsError('Cannot expand #LATEST. %r is a file.', prefix)
            suffix = posixpath.join(suffix, candidates[0][1])
        return '/' + suffix

    path = re.sub(r'/?#LATEST(?:{(\d+)})?(?=/|$)', expand_latest, path)
    # #LATEST expansion (could cache the pattern, but not worth it)
    self._logger.debug('Resolved path %r to %r.', hdfs_path, path)
    # Percent-encode everything except '/' and '='.
    return quote(path, '/=')
def handleMatch(self, m):
    """Resolve a Markdown reference link against explicit references first,
    then against the site-wide reflink table (with root-relative resolution
    and optional #anchor handling); returns an element tree tag or None."""
    try:
        ref = m.group(9)
    except IndexError:
        ref = None
    shortref = False
    if not ref:
        # if we got something like "[Google][]" or "[Google]"
        # we'll use "google" as the id
        ref = m.group(2)
        shortref = True
    # Clean up linebreaks in ref
    ref = self.NEWLINE_CLEANUP_RE.sub(' ', ref)
    text = m.group(2)
    id = ref.lower()
    if id in self.markdown.references:
        href, title = self.markdown.references[id]
    else:
        # Fall back to site-wide reflinks; split off an optional anchor.
        anchor = None
        if '#' in ref:
            ref, anchor = ref.split('#', 1)
        this = self.markdown.this
        if not posixpath.isabs(ref):
            # treat empty ref as reference to current page
            if not ref:
                ref = this['components'][-1]
            # Resolve the relative ref against the current page's directory.
            rootrelpath = '/' + '/'.join(this['components'][:-1])
            id = posixpath.normpath(posixpath.join(rootrelpath, ref))
            id = id.lower()
        else:
            id = ref.lower()
        ref = ref.lower()
        if ref in self.markdown.site['reflinks']:
            # A name that matches both directly and via path resolution is
            # ambiguous and rejected outright.
            if (ref != id) and (id in self.markdown.site['reflinks']):
                raise UrubuError(_error.ambig_ref_md, msg=ref, fn=this['fn'])
            id = ref
        if id in self.markdown.site['reflinks']:
            item = self.markdown.site['reflinks'][id]
            href, title = item['url'], item['title']
            if shortref:
                # Short references display the target's title (or anchor).
                text = title
                if anchor is not None:
                    text = anchor
            if anchor is not None:
                anchor = toc.slugify(anchor, '-')
                href = '%s#%s' % (href, anchor)
                # Track anchored references so they can be validated later.
                anchorref = '%s#%s' % (id, anchor)
                self.markdown.this['_anchorrefs'].add(anchorref)
        else:
            # ignore undefined refs
            urubu_warn(_warning.undef_ref_md, msg=ref, fn=this['fn'])
            return None
    return self.makeTag(href, title, text)
def FileNameReturnKey(self, event):
    """Handle Return in the filename entry: absolutize the name, then
    simulate pressing OK.

    NOTE(review): Python 2 code — `from string import strip` does not exist
    on Python 3.
    """
    from posixpath import isabs, expanduser, join
    from string import strip
    # if its a relative path then include the cwd in the name
    name = strip(self.fileNameEntry.get())
    if not isabs(expanduser(name)):
        self.fileNameEntry.delete(0, 'end')
        self.fileNameEntry.insert(0, join(self.cwd_print(), name))
    self.okButton.flash()
    self.OkPressed()
def configure(self, opts, args):
    """Record absolute iRODS paths for *args*, resolving relative ones
    against the configured iRODS cwd."""
    IRODSClientController.configure(self, opts, args)
    self.paths = []
    for raw in args:
        if rpath.isabs(raw):
            resolved = raw
        else:
            resolved = rpath.normpath(rpath.join(self.config.irodsCwd, raw))
        self.paths.append(resolved)
def test_pkgserv(httpget, pypistage, testapp):
    """A package file linked from the simple index is served with its bytes
    intact, and the index link is relative."""
    # Serve a fake simple-index entry and the archive it points at.
    pypistage.mock_simple("package", '<a href="/package-1.0.zip" />')
    httpget.setextfile("/package-1.0.zip", b"123")
    r = testapp.get("/root/pypi/+simple/package")
    assert r.status_code == 200
    href = getfirstlink(r.text).get("href")
    # The rewritten index must emit relative links.
    assert not posixpath.isabs(href)
    url = URL(r.request.url).joinpath(href).url
    r = testapp.get(url)
    assert r.body == b"123"
def __write_file(self, fn):
    """Write one tracked file below self.directory, creating parent dirs."""
    # Only relative, registered file names may be written.
    assert not posixpath.isabs(fn)
    assert fn in self.files
    destination = os.path.join(self.directory, fn)
    parent = os.path.dirname(destination)
    if not os.path.exists(parent):
        os.makedirs(parent)
    assert os.path.isdir(parent)
    with open(destination, "wb") as handle:
        handle.write(self.get_file_data(fn))
def absolute_path(self, path, start):
    """
    Return the absolute public path for an asset,
    given the path of the stylesheet that contains it.
    """
    if posixpath.isabs(path):
        # Absolute asset URLs are rooted at the storage location.
        base = default_storage.location
    else:
        base = start
    return posixpath.normpath(posixpath.join(base, path))
def test_pkgserv(httpget, extdb, testapp):
    """A package file linked from the simple index is served with its bytes
    intact, and the index link is relative."""
    # Serve a fake simple-index entry and the archive it points at.
    extdb.mock_simple("package", '<a href="/package-1.0.zip" />')
    httpget.setextfile("/package-1.0.zip", b"123")
    r = testapp.get("/root/pypi/+simple/package/")
    assert r.status_code == 200
    href = getfirstlink(r.text).get("href")
    # The rewritten index must emit relative links.
    assert not posixpath.isabs(href)
    url = resolve_link(r.request.url, href)
    r = testapp.get(url)
    assert r.body == b"123"
def _create_missing_remote_path(self, path):
    """Create *path* on the remote side one directory level at a time,
    skipping components that already exist."""
    if path == '.':
        return
    # Anchor at the filesystem root for absolute paths, else at the cwd.
    if posixpath.isabs(path):
        self._client.chdir('/')
    else:
        self._client.chdir('.')
    for dirname in path.split('/'):
        cwd = self._client.getcwd()
        if dirname and dirname not in self._client.listdir(cwd):
            self._client.mkdir(dirname)
        # NOTE(review): an absolute path yields a leading '' component here,
        # so chdir('') is invoked — confirm the client tolerates that.
        self._client.chdir(dirname)
def configure(self, opts, args):
    """Determine the working iRODS path from the first argument, falling back
    to the configured home collection; relative paths resolve against the
    iRODS cwd."""
    IRODSClientController.configure(self, opts, args)
    path = args[0] if args else self.config.irodsHome
    if rpath.isabs(path):
        self.path = path
    else:
        self.path = rpath.normpath(rpath.join(self.config.irodsCwd, path))
def safe_join(path, subpath):
    """Join *subpath* below *path*, rejecting native separators, absolute
    paths, and paths that escape the repository."""
    # This has been inspired by Flask's safe_join() function
    illegal = set([os.sep, os.altsep]) - set([posixpath.sep, None])
    if any(sep in subpath for sep in illegal):
        raise Exception("Illegal directory separator in dependency path %s" % subpath)
    normalized = posixpath.normpath(subpath)
    if posixpath.isabs(normalized):
        raise Exception("Dependency path %s cannot be absolute" % subpath)
    escapes_root = normalized == posixpath.pardir or normalized.startswith(
        posixpath.pardir + posixpath.sep)
    if escapes_root:
        raise Exception("Dependency path %s has to be inside the repository" % subpath)
    return os.path.join(path, *normalized.split(posixpath.sep))