def dircp(src: Path, dst: Path, make_rw: bool = False) -> None:
  """ Copy directory `src` to a not-yet-existing directory `dst`.

  The tree is first copied under a temporary name and then renamed into
  place, so `dst` never appears half-written. If `make_rw` is set, the
  copied tree is made writable before the final rename. """
  assert isdir(src)
  assert not isdir(dst)
  staging = Path(dst + '.tmp')
  copytree(src, staging)
  if make_rw:
    dirrw(staging)
  rename(staging, dst)
def assert_store_initialized(S: SPath) -> None:
  """ Verify that both the Pylightnix store and its tmp directory exist and
  reside on the same filesystem (a requirement for atomic renames). """
  store_dir = storage(S)
  tmp_dir = tempdir()
  assert isdir(store_dir), \
    (f"Looks like the Pylightnix store ('{PYLIGHTNIX_STORE}') is not initialized. Did "
     f"you call `store_initialize`?")
  assert isdir(tmp_dir), \
    (f"Looks like the Pylightnix tmp ('{tempdir()}') is not initialized. Did "
     f"you call `store_initialize`?")
  assert lstat(store_dir).st_dev == lstat(tmp_dir).st_dev, \
    (f"Looks like Pylightnix store and tmp directories belong to different filesystems. "
     f"This case is not supported yet. Consider setting PYLIGHTNIX_TMP to be on the same "
     f"device with PYLIGHTNIX_STORE")
def gc_exceptions(keep_paths: List[Path]) -> Tuple[List[DRef], List[RRef]]:
  """ Scans `keep_paths` list for references to Pylightnix storage. Ignores
  unrelated filesystem objects.

  Returns a pair of (derivation references, realization references) found
  among the scanned symlinks. Directories in `keep_paths` are walked and
  every sub-directory entry is checked; anything that is neither a symlink
  nor a directory is skipped. """
  keep_drefs: List[DRef] = []
  keep_rrefs: List[RRef] = []

  def _check(a: str) -> None:
    # FIX: the parameter used to be named `f` while the body referenced `a`;
    # on the `islink(path)` call path below `a` was unbound, raising
    # NameError. The parameter is now named `a` so the body reads its input.
    if islink(a):
      rref = path2rref(a)
      if rref is not None:
        keep_rrefs.append(rref)
    else:
      dref = path2dref(a)
      if dref is not None:
        keep_drefs.append(dref)

  for path in keep_paths:
    if islink(path):
      _check(path)
    elif isdir(path):
      for root, dirs, filenames in walk(path, topdown=True):
        for dirname in sorted(dirs):
          _check(Path(abspath(join(root, dirname))))
    else:
      # Plain files and non-existent paths cannot reference the storage.
      pass
  return keep_drefs, keep_rrefs
def shell(r: Union[Build, RRef, DRef, Path, str, None] = None) -> None:
  """ Open the Unix Shell in the directory associated with the argument
  passed. Path to the shell executable is read from the `SHELL` environment
  variable, defaulting to `/bin/sh`. If `r` is None, open the shell in the
  root of the Pylightnix storage.

  The function is expected to be run in REPL Python shells like IPython.
  """
  def _target_dir() -> str:
    # Resolve `r` to a working directory; the dispatch order matters:
    # rrefs/drefs are strings too and must be recognized first.
    if r is None:
      import pylightnix.core
      return pylightnix.core.PYLIGHTNIX_STORE
    if isrref(r):
      return store_rref2path(RRef(r))
    if isdref(r):
      return store_dref2path(DRef(r))
    if isinstance(r, Build):
      assert len(r.outgroups) > 0, (
        "Shell function requires at least one build output path to be defined"
      )
      return r.outgroups[0][Tag('out')]
    if isdir(r):
      return str(r)
    if isfile(r):
      return dirname(str(r))
    assert False, (
      f"Expecting `RRef`, `DRef`, a directory or file path (either a string or "
      f"a `Path`), or None. Got {r}")

  Popen([environ.get('SHELL', '/bin/sh')], shell=False, cwd=_target_dir()).wait()
def alldrefs(S=None) -> Iterable[DRef]:
  """ Iterates over all derivations of the storage located at `S`
  (PYLIGHTNIX_STORE env is used by default) """
  root = storage(S)
  for entry in listdir(root):
    # Skip in-flight temporary folders and non-directories; a derivation
    # folder name is `<32-char hash>-<name>`.
    if not entry.endswith('.tmp') and isdir(join(root, entry)):
      yield mkdref(HashPart(entry[:32]), Name(entry[33:]))
def mksymlink(rref: RRef, tgtpath: Path, name: str, withtime: bool = True,
              S=None) -> Path:
  """ Create a symlink to realization `rref` inside existing directory
  `tgtpath`. A backward-compatibility wrapper delegating to `linkrref`. """
  assert isdir(tgtpath), f"Target link directory doesn't exist: '{tgtpath}'"
  return linkrref(rref, destdir=tgtpath, name=name, withtime=withtime, S=S)
def drefrrefs(dref: DRef, S=None) -> List[RRef]:
  """ List all realizations of a derivation `dref`. The sort order is
  unspecified. Matching is not taken into account. """
  (dhash, nm) = undref(dref)
  droot = store_dref2path(dref, S)
  # Realization folders are named by their content hash; ignore in-flight
  # '.tmp' folders and stray non-directories.
  return [mkrref(HashPart(entry), dhash, nm)
          for entry in listdir(droot)
          if entry[-4:] != '.tmp' and isdir(join(droot, entry))]
def dirhash(path: Path, verbose: bool = False) -> Hash:
  """ Calculate recursive SHA256 hash of a directory. Ignore files with names
  starting with underscore ('_'). For symbolic links, hash the result of
  `readlink(link)`.

  FIXME: Include file/directory names the into hash data.
  FIXME: Figure out how does sha265sum handle symlinks and do the same thing.
  FIXME: Stop loading whole files in memory for calculating hashes
  """
  assert isdir(path), f"dirhash() expects directory path, not '{path}'"

  def _entries() -> Iterable[Tuple[str, bytes]]:
    for root, dirs, filenames in walk(abspath(path), topdown=True):
      for filename in sorted(filenames):
        if not filename or filename.startswith('_'):
          continue  # underscore-prefixed (and empty) names are excluded
        localpath = abspath(join(root, filename))
        if islink(localpath):
          yield (f'link:{localpath}', encode(readlink(localpath)))
        # NOTE(review): the file content is read even when the entry is a
        # symlink (both records enter the hash); a dangling symlink raises
        # here. Preserved as-is: changing it would alter existing hashes.
        with open(localpath, 'rb') as f:
          yield (localpath, f.read())

  return datahash(_entries(), verbose=verbose)
def lsdref_(r: DRef) -> Iterable[str]:
  """ Yield the names of sub-directories of the derivation folder of `r`. """
  p = store_dref2path(r)
  for d in listdir(p):
    # FIX: the arguments were swapped (`join(d, p)`); since `p` is absolute,
    # `join` discarded `d` entirely and `isdir` always tested `p` itself,
    # so every directory entry was yielded regardless of its kind.
    p2 = join(p, d)
    if isdir(p2):
      yield d
def assert_promise_fulfilled(k: str, p: PromisePath, o: Path) -> None:
  """ Check that promise `p`, declared under config key `k`, resolves to an
  existing file, directory or symlink inside output folder `o`. """
  # p[0] is the promise tag; the remaining components form the relative path.
  ppath = join(o, *p[1:])
  exists = isfile(ppath) or isdir(ppath) or islink(ppath)
  assert exists, (
    f"Promise '{k}' of {p[0]} is not fulfilled. "
    f"{ppath} is expected to be a file or a directory.")
def mkrealization(dref: DRef, l: Context, o: Path,
                  leader: Optional[Tuple[Tag, RRef]] = None,
                  S=None) -> RRef:
  """ Create the [Realization](#pylightnix.types.RRef) object in the storage
  `S`. Return new Realization reference.

  Parameters:
  - `dref:DRef`: Derivation reference to create the realization of.
  - `l:Context`: Context which stores dependency information.
  - `o:Path`: Path to temporal (build) folder which contains artifacts,
    prepared by the [Realizer](#pylightnix.types.Realizer).
  - `leader`: Tag name and Group identifier of the Group leader. By default,
    we use name `out` and derivation's own rref.

  FIXME: Assert or handle possible but improbable hash collision[*]
  FIXME: Consider(not sure) writing group.json for all realizations[**]
  """
  c = store_config(dref, S)
  assert_valid_config(c)
  (dhash, nm) = undref(dref)
  # The build folder must already exist: it is moved (not copied) into the
  # storage below.
  assert isdir(o), (
    f"While realizing {dref}: Outpath is expected to be a path to existing "
    f"directory, but got {o}")
  # Reserved names are written by Pylightnix itself (see 'context.json' and
  # 'group.json' below), so the realizer must not have created them.
  for fn in PYLIGHTNIX_RESERVED:
    assert not isfile(join(o, fn)), (
      f"While realizing {dref}: output folder '{o}' contains file '{fn}'. "
      f"This name is reserved, please use another name. List of reserved "
      f"names: {PYLIGHTNIX_RESERVED}")
  # Persist the dependency context alongside the artifacts.
  with open(reserved(o, 'context.json'), 'w') as f:
    f.write(context_serialize(l))
  if leader is not None:
    # [**] Record the group-leader information for grouped realizations.
    tag, group_rref = leader
    with open(reserved(o, 'group.json'), 'w') as f:
      json_dump({'tag': tag, 'group': group_rref}, f)
  # The realization's identity is the hash of the folder content, which now
  # includes the metadata files written above.
  rhash = dirhash(o)
  rref = mkrref(trimhash(rhash), dhash, nm)
  rrefpath = store_rref2path(rref, S)
  rreftmp = Path(rrefpath + '.tmp')
  # Two-step move into the store: rename to a '.tmp' sibling first, freeze
  # it read-only, then rename onto the final path.
  replace(o, rreftmp)
  dirchmod(rreftmp, 'ro')
  try:
    replace(rreftmp, rrefpath)
  except OSError as err:
    if err.errno == ENOTEMPTY:
      # Folder name contain the hash of the content, so getting here
      # probably[*] means that we already have this object in storage so we
      # just remove temp folder.
      dirrm(rreftmp, ignore_not_found=False)
    else:
      # Attempt to roll-back
      dirchmod(rreftmp, 'rw')
      replace(rreftmp, o)
      raise
  return rref