def test_realize_readonly() -> None:
  """Realized references must be write- and remove-protected, while
  executable artifacts must keep their exec permission.

  Fix: corrected the grammar of the final assert message
  ("Did we lost" -> "Did we lose").
  """
  with setup_storage2('test_realize_readonly') as (T, S):
    rref1 = realize(instantiate(mkstage, {'a': '1'}, S=S))
    # Creating a new file inside a realized reference must fail.
    try:
      with open(join(store_rref2path(rref1, S), 'newfile'), 'w') as f:
        f.write('foo')
      raise ShouldHaveFailed('No write-protection??')
    except OSError:
      pass
    # Removing the realized reference directory must fail as well.
    try:
      rmtree(store_rref2path(rref1, S))
      raise ShouldHaveFailed('No remove-protection??')
    except OSError:
      pass

    def _realize(b: Build):
      # Write an executable shell script into the build output.
      with open(join(build_outpath(b), 'exe'), 'w') as f:
        f.write('#!/bin/sh\necho "Fooo"')
      chmod(join(build_outpath(b), 'exe'), S_IWRITE | S_IREAD | S_IEXEC)

    rref2 = realize(
        instantiate(mkdrv, Config({}), match_some(), build_wrapper(_realize),
                    S=S))
    # The exec bit must survive realization.
    assert pipe_stdout([join(store_rref2path(rref2, S), 'exe')]) == 'Fooo\n', \
        "Did we lose exec permission?"
def test_dirhash3(d) -> None:
  """dirhash of a one-file directory must agree with sha256sum's output."""
  with setup_storage('dirhash3') as path:
    with open(join(path, 'a'), 'w') as f:
      f.write(str(d))
    proc = run([SHA256SUM, join(path, 'a')], stdout=-1, check=True, cwd=path)
    digest = dirhash(path)
    # sha256sum prints "<hash>  <file>"; compare only the hash prefix.
    assert proc.stdout[:len(digest)].decode('utf-8') == digest
def test_dirhash4(d) -> None:
  """A directory containing a single file hashes to that file's hash."""
  with setup_storage('dirhash4') as path:
    fname = join(path, 'a')
    with open(fname, 'w') as f:
      f.write(str(d))
    assert dirhash(path) == filehash(Path(fname)), \
        "Hash of a 1-file dir should match the hash of the file"
def test_mklogdir1() -> None:
  """mklogdir must create the log directory, optionally with subdirs."""
  with setup_storage('mklogdir1') as path:
    d = mklogdir(tag='testtag', logrootdir=path)
    assert isdir(d)
    d = mklogdir(tag='testtag', logrootdir=path, subdirs=['a', 'b'])
    for sub in ('a', 'b'):
      assert isdir(join(d, sub))
def test_fetchurl():
  """fetchurl should download (via a mocked wget) and unpack an archive."""
  with setup_storage('test_fetchurl'):
    with TemporaryDirectory() as tmp:
      mockwget = join(tmp, 'mockwget')
      mockdata = join(tmp, 'mockdata')
      with open(mockdata, 'w') as f:
        f.write('blala')
      system(f'tar -zcvf {mockdata}.tar.gz {mockdata}')
      # Fake `wget`: just move the pre-made archive to the target path ($3).
      with open(mockwget, 'w') as f:
        f.write("#!/bin/sh\n")
        f.write(f"mv {mockdata}.tar.gz $3\n")
      chmod(mockwget, stat(mockwget).st_mode | S_IEXEC)
      wanted_sha256 = pipe_stdout([SHA256SUM, f"{mockdata}.tar.gz"]).split()[0]
      import pylightnix.stages.fetch
      oldwget = pylightnix.stages.fetch.WGET
      try:
        # Temporarily substitute the real wget with the mock.
        pylightnix.stages.fetch.WGET = lambda: mockwget
        closure = instantiate(fetchurl,
                              url='mockwget://result.tar.gz',
                              filename='validname.tar.gz',
                              sha256=wanted_sha256)
        realize(closure)
      finally:
        pylightnix.stages.fetch.WGET = oldwget
def test_mksymlink() -> None:
  """mksymlink must create links to realizations, with or without a
  timestamp component.

  Fix: the third location check originally re-asserted `tp in s` (already
  verified above) instead of checking the new link `s2` — an apparent
  copy-paste slip.
  """
  with setup_storage2('test_mksymlink') as (T, S):
    tp = T

    def _setting1(m: Manager) -> DRef:
      return mkstage(m, {'a': '1'}, lambda i, tag: 33, buildtime=False)

    def _setting2(m: Manager) -> DRef:
      return mkstage(m, {'a': '1'}, lambda i, tag: 42, buildtime=True)

    clo = instantiate(_setting1, S=S)
    rref = realize(clo)
    clo2 = instantiate(_setting2, S=S)
    rref2 = realize(clo2, force_rebuild=True)
    # Same config -> same derivation, but distinct realizations.
    assert clo.dref == clo2.dref
    assert rref2 != rref
    s = mksymlink(rref, tgtpath=tp, name='thelink', S=S)
    assert islink(s)
    assert not isfile(join(s, '__buildtime__.txt'))
    assert tp in s
    s2 = mksymlink(rref2, tgtpath=tp, name='thelink', S=S)
    assert islink(s2)
    assert isfile(join(s2, '__buildtime__.txt'))
    assert tp in s2  # fixed: originally checked `s` again
    assert s2 != s, "s2 should have timestamp"
    s3 = mksymlink(rref2, tgtpath=tp, name='thelink', withtime=False, S=S)
    assert s3 == s
def test_repl_override():
  """The repl API must allow overriding an interrupted stage's output."""
  with setup_storage('test_repl_override'):
    n1: DRef
    n2: DRef

    def _setting(m: Manager) -> DRef:
      nonlocal n1, n2
      n1 = mkstage(m, {'a': '1'}, lambda i, tag: 33)
      n2 = mkstage(m, {'maman': n1}, lambda i, tag: 42)
      return n2

    closure = instantiate(_setting)
    handle = repl_realize(closure, force_interrupt=[n1])
    # Realization stops right at n1.
    assert handle.dref == n1
    bld = repl_build(handle)
    with open(join(build_outpath(bld), 'artifact'), 'w') as f:
      f.write('777')
    repl_continue(bld.outgroups, rh=handle)
    result = repl_rref(handle)
    assert result is not None
    rrefn1 = store_deref(result, n1)[Tag('out')]
    # The manually-written artifact must win over the stage's realizer.
    assert tryread(Path(join(store_rref2path(rrefn1), 'artifact'))) == '777'
def _realize(S: SPath, dref: DRef, context: Context,
             ra: RealizeArg) -> List[Dict[Tag, Path]]:
  """Realizer writing 'artifact', 'group' and 'tag' files into every
  output group.  Relies on `buildtime`, `tagset` and `nondet` from the
  enclosing scope."""
  b = mkbuild(S, dref, context, buildtime=buildtime)
  for idx, group in enumerate(build_setoutgroups(b, tagset)):
    # Every group must include the mandatory 'out' tag.
    assert tag_out() in group.keys()
    for t, outpath in group.items():
      for fname, content in (('artifact', str(nondet(idx, t))),
                             ('group', str(idx)),
                             ('tag', str(t))):
        with open(join(outpath, fname), 'w') as f:
          f.write(content)
  return b.outgroups
def test_dirhash() -> None:
  """dirhash must ignore files whose names start with an underscore and
  change when a regular file is added.

  Fix: corrected the typo 'underscope' -> 'underscore' in the assert
  message.
  """
  with setup_storage('dirhash') as path:
    h1 = dirhash(path)
    assert_valid_hash(h1)
    # Underscore-prefixed files must not affect the hash.
    with open(join(path, '_a'), 'w') as f:
      f.write('1')
    h2 = dirhash(path)
    assert_valid_hash(h2)
    assert h1 == h2, "Test expected to ignore files starting with underscore"
    # A regular file must change the hash.
    with open(join(path, 'a'), 'w') as f:
      f.write('1')
    h3 = dirhash(path)
    assert_valid_hash(h3)
    assert h3 != h2
def test_linkrrefs() -> None:
  """linkrrefs must create 'result-*' symlinks pointing into the storage.

  Fix: the test-storage name was 'test_linkrefs' (missing an 'r'),
  inconsistent with the function name; renamed to 'test_linkrrefs'.
  """
  with setup_storage2('test_linkrrefs') as (T, S):
    s1 = partial(mkstage, config={
        'name': '1',
        'promise': [promise, 'artifact']
    })
    rref1 = realize(instantiate(s1, S=S))
    # Duplicate refs still yield one link entry per input ref.
    links = linkrrefs([rref1, rref1], destdir=S, S=S)
    assert len(links) == 2
    assert str(links[0]) == join(S, 'result-1')
    assert islink(join(S, 'result-1'))
    # With `withtime` the link name carries a timestamp prefix.
    links = linkrrefs([rref1], destdir=S, withtime=True, S=S)
    assert S in links[0]
def test_mklogdir2(strtag, timetag) -> None:
  """A '_<tag>_latest' symlink must always track the newest log directory.

  Fix: the original used `open(...).write(...)` which leaks the file
  handle; both writes now use context managers.
  """
  with setup_storage('mklogdir2') as path:
    linkpath = join(path, f'_{strtag}_latest')
    logdir = mklogdir(tag=strtag, logrootdir=Path(path), timetag=timetag)
    with open(join(logdir, 'a'), 'w') as f:
      f.write('a')
    assert isdir(logdir)
    assert islink(linkpath)
    assert isfile(join(linkpath, 'a'))
    # A newer log directory must re-point the 'latest' symlink.
    logdir2 = mklogdir(tag=strtag, logrootdir=Path(path), timetag=timetag + '2')
    with open(join(logdir2, 'b'), 'w') as f:
      f.write('b')
    assert isdir(logdir2)
    assert islink(linkpath)
    assert isfile(join(linkpath, 'b'))
def test_fetchlocal():
  """fetchlocal should copy a local archive into storage and unpack it.

  Fix: the storage name was 'test_fetclocal' (typo), which also collided
  with the storage used by test_fetchlocal2; renamed to match the test.
  """
  with setup_storage('test_fetchlocal') as tmp:
    mockdata = join(tmp, 'mockdata')
    with open(mockdata, 'w') as f:
      f.write('dogfood')
    # -C keeps the archive member path relative to tmp.
    system(f"tar -C '{tmp}' -zcvf {tmp}/mockdata.tar.gz mockdata")
    wanted_sha256 = pipe_stdout([SHA256SUM, "mockdata.tar.gz"],
                                cwd=tmp).split()[0]
    rref = realize(
        instantiate(fetchlocal,
                    path=mockdata + '.tar.gz',
                    filename='validname.tar.gz',
                    sha256=wanted_sha256))
    assert isrref(rref)
    assert isfile(join(store_rref2path(rref), 'mockdata'))
def test_fetchlocal2():
  """fetchlocal in 'as-is' mode should copy the file without unpacking.

  Fix: the storage name was 'test_fetclocal' (typo), shared with
  test_fetchlocal's storage; renamed to match this test's name.
  """
  with setup_storage('test_fetchlocal2') as tmp:
    mockdata = join(tmp, 'mockdata')
    with open(mockdata, 'w') as f:
      f.write('dogfood')
    wanted_sha256 = pipe_stdout([SHA256SUM, "mockdata"], cwd=tmp).split()[0]
    rref = realize(
        instantiate(fetchlocal, path=mockdata, sha256=wanted_sha256,
                    mode='as-is'))
    assert isrref(rref)
    assert isfile(join(store_rref2path(rref), 'mockdata'))
    # The promise-based lens must resolve to an existing file as well.
    assert isfile(mklens(rref).out_path.syspath)
def test_mkfile() -> None:
  """mkfile must honor the optional file-name argument and produce
  distinct realizations for distinct contents."""
  with setup_storage('test_mkfile'):

    def _setting(m: Manager, nm) -> DRef:
      return mkfile(m, Name('foo'), bytes((nm or 'bar').encode('utf-8')), nm)

    rref1 = realize(instantiate(_setting, None))
    with open(join(store_rref2path(rref1), 'foo'), 'r') as f:
      assert f.read() == 'bar'
    rref2 = realize(instantiate(_setting, 'baz'))
    with open(join(store_rref2path(rref2), 'baz'), 'r') as f:
      assert f.read() == 'baz'
    assert rref1 != rref2
def setup_storage(tn: str):
  """Yield a fresh storage directory named `tn` under the system tempdir,
  resetting the pylightnix STORE globals before and after use so tests
  never touch the production store."""
  import pylightnix.core
  pylightnix.core.PYLIGHTNIX_STORE = None  # type:ignore
  pylightnix.core.PYLIGHTNIX_TMP = None  # type:ignore
  storepath = Path(join(gettempdir(), tn))
  # Wipe any leftovers from a previous run; absence is fine.
  try:
    dirchmod(storepath, 'rw')
    rmtree(storepath)
  except FileNotFoundError:
    pass
  store_initialize(custom_store=storepath,
                   custom_tmp=join(gettempdir(), 'pylightnix_tmp'))
  assert not listdir(storepath)
  try:
    yield storepath
  finally:
    pylightnix.core.PYLIGHTNIX_STORE = None  # type:ignore
    pylightnix.core.PYLIGHTNIX_TMP = None  # type:ignore
def test_repl_globalHelper():
  """Global repl helpers must operate on the implicit current handle."""
  with setup_storage('test_repl_globalHelper'):
    n1: DRef
    n2: DRef

    def _setting(m: Manager) -> DRef:
      nonlocal n1, n2
      n1 = mkstage(m, {'a': '1'})
      n2 = mkstage(m, {'maman': n1})
      return n2

    handle = repl_realize(instantiate(_setting), force_interrupt=True)
    # Interrupted: no realization reference yet.
    assert repl_rref(handle) is None
    bld = repl_build()  # note: no explicit handle -- uses the global one
    with open(join(build_outpath(bld), 'artifact.txt'), 'w') as f:
      f.write("Fooo")
    repl_continueBuild(bld)
    result = repl_rref(handle)
    assert result is not None
    assert isfile(join(store_rref2path(result), 'artifact.txt'))
def _realize(b: Build):
  """Verify promise resolution during the build and, when `fullfill` is
  set in the enclosing scope, write the promised artifact."""
  outp = build_outpath(b)
  attrs = build_cattrs(b)
  assert b.dref in attrs.promise
  assert n1 in store_cattrs(attrs.maman, S).promise
  # The local promise resolves into this build's output directory.
  assert build_path(b, attrs.promise) == join(outp, 'uber-artifact')
  # The parent's promise resolves identically through both access paths.
  assert build_path(b, store_cattrs(attrs.maman, S).promise) == \
      build_path(b, attrs.maman_promise)
  if fullfill:
    with open(build_path(b, attrs.promise), 'w') as f:
      f.write('chickenpoop')
def _match(S: SPath, dref: DRef, context: Context) -> Optional[List[RRefGroup]]:
  """Match the `nmatch` (from the enclosing scope) groups whose 'artifact'
  files hold the largest integer values; None when nothing is available."""
  scored = []
  for grp in store_rrefs(dref, context, S):
    # Missing/unreadable artifact defaults to 0.
    score = maybereadstr(
        join(store_rref2path(grp[tag_out()], S), 'artifact'), '0', int)
    scored.append((score, grp))
  if not scored:
    return None
  scored.sort(key=lambda pair: pair[0])
  return [grp for _, grp in scored[-nmatch:]]
def test_mknode_with_artifacts(d, a) -> None:
  """mknode must store every artifact passed to it."""
  with setup_storage('test_mknode_with_artifacts'):

    def _setting(m: Manager) -> DRef:
      return mknode(m, config_dict=d, artifacts=a)

    closure = instantiate(_setting)
    assert len(closure.derivations) == 1
    rref = realize(instantiate(_setting))
    for nm in a:
      assert isfile(join(store_rref2path(rref), nm)), \
          f"RRef {rref} doesn't contain artifact {nm}"
def setup_storage2(tn: str):
  """Yield a (tmp, store) path pair for an isolated per-test storage
  rooted at <tempdir>/pylightnix/<tn>, resetting STORE globals around
  the test to avoid touching the production store."""
  import pylightnix.core
  pylightnix.core.PYLIGHTNIX_TMP = None  # type:ignore
  pylightnix.core.PYLIGHTNIX_STORE = None  # type:ignore
  assert len(tn) > 0
  testroot = Path(join(gettempdir(), 'pylightnix', tn))
  storepath = Path(join(testroot, storagename()))
  tmppath = Path(join(testroot, 'tmp'))
  # Wipe leftovers from previous runs; absence is fine.
  try:
    dirchmod(testroot, 'rw')
    rmtree(testroot)
  except FileNotFoundError:
    pass
  makedirs(storepath, exist_ok=False)
  makedirs(tmppath, exist_ok=False)
  pylightnix.core.PYLIGHTNIX_TMP = tmppath  # type:ignore
  assert not listdir(storepath)
  try:
    yield tmppath, storepath
  finally:
    pylightnix.core.PYLIGHTNIX_STORE = None  # type:ignore
    pylightnix.core.PYLIGHTNIX_TMP = None  # type:ignore
def test_store_initialize() -> None:
  """store_initialize must create the tmp/store dirs and be idempotent."""
  with setup_storage('test_store_initialize') as p:
    import pylightnix.core
    try:
      pylightnix.core.PYLIGHTNIX_TMP = join(p, 'tmp')
      pylightnix.core.PYLIGHTNIX_STORE = join(p, 'store')
      for _ in range(2):  # second pass checks idempotency
        store_initialize(custom_store=None, custom_tmp=None)
        assert isdir(join(p, 'tmp'))
        assert isdir(join(p, 'store'))
    finally:
      pylightnix.core.PYLIGHTNIX_TMP = None  # type:ignore
      pylightnix.core.PYLIGHTNIX_STORE = None  # type:ignore
def test_pack2(stages) -> None:
  """Pack every realization group of `stages` from one storage and unpack
  them all into a second storage; both storages must end up identical."""
  archives = []
  with setup_storage2('test_pack_src') as (T1, S1):
    for nstage, stage in enumerate(stages):
      for ngroup, rg in enumerate(realizeGroups(instantiate(stage, S=S1))):
        ap = Path(join(T1, f'archive_{nstage:02d}_{ngroup:02d}.zip'))
        print(f'Packing {ap}')
        pack(groups2rrefs([rg]), ap, S=S1)
        archives.append(ap)
    print('PACK done')
  with setup_storage2('test_pack_dst') as (T2, S2):
    for ap in archives:
      print(f'Unpacking {ap}')
      unpack(Path(ap), S=S2)
    # Both storages must now contain the same derivations/realizations.
    assert set(alldrefs(S=S1)) == set(alldrefs(S=S2))
    assert set(allrrefs(S=S1)) == set(allrrefs(S=S2))
    print('OK!')
def test_pack1() -> None:
  """Pack a chain of three dependent stages and unpack it back in place."""
  with setup_storage2('test_pack1') as (T, S):

    def _stage(m):
      s1 = mkstage(m, {'name': '1', 'promise': [promise, 'artifact']})
      s2 = mkstage(m, {'name': '2', 'maman': s1,
                       'promise': [promise, 'artifact']})
      return mkstage(m, {'name': '3', 'papa': s2,
                         'promise': [promise, 'artifact']})

    rref3 = realize(instantiate(_stage, S=S))
    arch_path = Path(join(T, 'archive.zip'))
    pack([rref3], arch_path, S=S)
    unpack(arch_path, S=S)
    assert isfile(arch_path)
def test_overwrite_realizer() -> None:
  """Re-declaring a stage with the same config but a different realizer
  must overwrite the realizer.

  Fixes: (1) the artifact was read through a leaked `open()` handle --
  now a context manager; (2) dropped the declared-but-never-assigned
  `n4` variable.
  """
  with setup_storage2('test_overwrite_realizer') as (T, S):
    n1: DRef
    n2: DRef
    n3: DRef

    def _setting(m: Manager) -> DRef:
      nonlocal n1, n2, n3
      n1 = mkstage(m, {'a': '1'}, lambda i, t: 33)
      n2 = mkstage(m, {'maman': n1})
      # Same config as n1, new realizer: must map to the same DRef.
      n3 = mkstage(m, {'a': '1'}, lambda i, t: 42)
      assert n1 == n3
      return n2

    rref_n2 = realize(instantiate(_setting, S=S))
    all_drefs = list(alldrefs(S))
    assert len(all_drefs) == 2
    rref_n3 = store_deref(rref_n2, store_cattrs(rref_n2, S).maman, S)[Tag('out')]
    # The second (overwriting) realizer must have produced the artifact.
    with open(join(store_rref2path(rref_n3, S), 'artifact'), 'r') as f:
      assert f.read() == '42'
def test_shellref():
  """shellref/shell helpers must spawn $SHELL for refs, builds and paths,
  and reject garbage arguments.

  Fix: the original overwrote environ['SHELL'] permanently, leaking the
  mock shell into every later test; it is now restored in a finally block.
  """
  with setup_storage('test_shellref') as s:
    with TemporaryDirectory() as tmp:
      mockshell = join(tmp, 'mockshell')
      # A fake shell which merely prints the working directory.
      with open(mockshell, 'w') as f:
        f.write("#!/bin/sh\n")
        f.write("pwd\n")
      chmod(mockshell, stat(mockshell).st_mode | S_IEXEC)
      oldshell = environ.get('SHELL')
      environ['SHELL'] = mockshell
      try:
        rref = realize(instantiate(mkstage, {'a': 1}))
        shellref(rref)
        shellref(rref2dref(rref))
        shellref()
        shell(store_rref2path(rref))
        repl_realize(instantiate(mkstage, {'n': 1}), force_interrupt=True)
        b = repl_build()
        o = build_outpath(b)
        shell(b)
        repl_cancelBuild(b)
        try:
          shellref('foo')  # type:ignore
          raise ShouldHaveFailed('shellref should reject garbage')
        except AssertionError:
          pass
      finally:
        # Restore the caller's SHELL (or remove it if it was unset).
        if oldshell is None:
          environ.pop('SHELL', None)
        else:
          environ['SHELL'] = oldshell
def _realize(b: Build):
  """Write an executable shell script named 'exe' into the build output."""
  exe = join(build_outpath(b), 'exe')
  with open(exe, 'w') as f:
    f.write('#!/bin/sh\necho "Fooo"')
  chmod(exe, S_IWRITE | S_IREAD | S_IEXEC)
def test_readwrite(d) -> None:
  """A JSON value must survive a writejson/readjson round-trip."""
  with setup_storage('test_readwrite') as p:
    fpath = join(p, 'testfile.json')
    writejson(fpath, d)
    assert str(readjson(fpath)) == str(d)