def stage_datasetS(m: Manager, index: int = 0, Nvalid: int = 5000, npoints: int = 100) -> DRef:
  """ Creates stabilized dataset """
  def _config():
    name = 'dataset3'
    nonlocal index, npoints
    N = int(5 * Nvalid / 3)
    min_allow_size = Nvalid
    out_examples = [promise, 'examples.bin']
    out_barplots = [promise, 'barplots']
    out_traceplot = [promise, 'traceplot.png']
    out_inputs = [promise, 'inputs.json']
    version = 2
    return locals()

  def _make(b: Build):
    build_setoutpaths(b, 1)
    rref = run_dataset2(what=2, maxitems=mklens(b).N.val, index=index,
                        interactive=False)
    assert isfile(mklens(rref).ref_df.examples.syspath)
    system(f"cp {mklens(rref).ref_df.ref_data.inputs.syspath} {mklens(b).out_inputs.syspath}")
    df = pd.read_csv(mklens(rref).df.syspath)
    makedirs(mklens(b).out_barplots.syspath)
    allowed = stabilize(
      df,
      npoints=mklens(b).npoints.val,
      path_barplot=lambda i: join(mklens(b).out_barplots.syspath,
                                  f'_plot_{i:03d}.png'),
      path_traceplot=mklens(b).out_traceplot.syspath,
      min_allow_size=mklens(b).min_allow_size.val)
    with open(mklens(rref).ref_df.examples.syspath, 'rb') as f:
      with open(mklens(b).out_examples.syspath, 'wb') as f_o:
        _read = fd2examples(f)
        _write = examples2fd(f_o)
        # Copy only the examples whose index survived stabilization; the loop
        # ends when `_read()` raises at the end of the stream.
        try:
          idx = 0
          while True:
            example = _read()
            if idx in allowed:
              _write(example)
            idx += 1
        except KeyboardInterrupt:
          raise
        except Exception as e:
          print(e)

  return mkdrv(m, mkconfig(_config()), match_only(), build_wrapper(_make))

def stage_df(m: Manager, ref_data: DRef) -> DRef:
  """ Converts the binary examples of `ref_data` into a CSV dataframe """
  def _config():
    nonlocal ref_data
    name = mklens(ref_data).name.val + '-df'
    examples = mklens(ref_data).out_examples.refpath
    out_df = [promise, 'df.csv']
    return locals()

  def _make(b: Build):
    build_setoutpaths(b, 1)
    df = examples_dataframe(mklens(b).examples.syspath)
    df.to_csv(mklens(b).out_df.syspath)

  return mkdrv(m, mkconfig(_config()), match_only(), build_wrapper(_make))

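# Hedged usage sketch, added for illustration and not part of the original
# module: compose stage_datasetS with stage_df and realize the pipeline via
# Pylightnix's top-level `instantiate`/`realize` entry points (assumed to be
# importable as in the stock API). The helper name `run_stabilized_df` is
# hypothetical.
def run_stabilized_df() -> RRef:
  from pylightnix import instantiate, realize
  def _pipeline(m: Manager) -> DRef:
    ds = stage_datasetS(m, index=0, Nvalid=5000, npoints=100)
    return stage_df(m, ds)
  return realize(instantiate(_pipeline))
# e.g. df = pd.read_csv(mklens(run_stabilized_df()).out_df.syspath)
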
def stage_inputs(m:Manager, num_inputs:int=4, batch_size:int=5, index:int=1)->DRef:
  range_min = -100
  range_max = 100
  def _config():
    name = 'dataset-inputs'
    nonlocal num_inputs, batch_size, range_min, range_max, index
    out_inputs = [promise, 'inputs.json']
    version = 4
    return locals()

  def _make(b:Build):
    build_setoutpaths(b, 1)
    builtin_inputs:dict = {
      # Ref("i0"):IVal(0),
      # Ref("i1"):IVal(1)
    }
    IMEMs:List[IMem] = [
      mkmap(_union(builtin_inputs,
                   {Ref(f"i{i+len(builtin_inputs.keys())}"):
                    IVal(randint(range_min, range_max))
                    for i in range(num_inputs)}))
      for _ in range(batch_size)]
    writejson(mklens(b).out_inputs.syspath, [imem2json(M) for M in IMEMs])

  return mkdrv(m, mkconfig(_config()), match_only(), build_wrapper(_make))

def stage_dataset1(m:Manager, ref_inputs:DRef)->DRef:
  Wdef = 5
  lib_impl = lib_arith
  lib_methods = [mn.val for mn in lib_impl.keys()]
  time2run_sec = int(0.5*60)

  def _config():
    nonlocal Wdef, lib_methods, time2run_sec
    num_inputs = mklens(ref_inputs).num_inputs.val
    batch_size = mklens(ref_inputs).batch_size.val
    inputs = mklens(ref_inputs).out_inputs.refpath
    out_examples = [promise, 'examples.bin']
    version = ['0']
    return locals()

  def _make(b:Build):
    build_setoutpaths(b, 1)
    WLIB = mkwlib(lib_impl, Wdef)
    IMEMs = [json2imem(j) for j in readjson(mklens(b).inputs.syspath)]
    print(f"Inputs: {IMEMs}")
    i = 0
    acc:List[Expr] = []
    g = genexpr(WLIB, IMEMs)
    written_bytes = 0
    time_start = time()
    with open(mklens(b).out_examples.syspath, 'wb') as f:
      _add = examples2fd(f)
      while time() < time_start + mklens(b).time2run_sec.val:
        r, mem, vals, w = next(g)
        ival = vals[0]
        assert isinstance(ival[r], IVal)
        expr = gather(r, mem)
        acc.append(expr)
        i += 1
        for j in range(len(IMEMs)):
          written_bytes += _add(Example(IMEMs[j], expr, vals[j][r]))
        if i % 300 == 0:
          print(f".. i {i} W {w} LAST_REF {r} WRBYTES {written_bytes}.. ")

  return mkdrv(m, mkconfig(_config()), match_only(), build_wrapper(_make))

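# Hedged sketch (illustrative, not from the original source): generate random
# inputs with stage_inputs and feed them to the time-bounded stage_dataset1.
# The helper name `run_dataset1` is hypothetical; `instantiate`/`realize` are
# the stock Pylightnix entry points.
def run_dataset1() -> RRef:
  from pylightnix import instantiate, realize
  def _pipeline(m: Manager) -> DRef:
    ref_inputs = stage_inputs(m, num_inputs=4, batch_size=5)
    return stage_dataset1(m, ref_inputs)
  return realize(instantiate(_pipeline))
# The resulting examples live at mklens(run_dataset1()).out_examples.syspath
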
def summarize_node(m:Manager)->DRef:
  def _config()->Config:
    return Config({
      'name':'analyzer',
      'version':3,
      'history':[breed_node(m), 'history.json']})

  def _build(b:Build)->None:
    chdir(build_outpath(b))
    c = build_cattrs(b)
    fig = plt.figure()
    ax = fig.add_subplot(1, 1, 1)
    ax.set_ylabel('mean strategies')
    ax.grid()
    for nhist, histpath in enumerate(build_paths(b, c.history)):
      epoches:List[float] = []
      pmeans:List[float] = []
      rmeans:List[float] = []
      with open(histpath, 'r') as f:
        epoches, pmeans, rmeans = json_loads(f.read())
      ax.plot(epoches, pmeans, label=f'pmeans{nhist}', color='blue')
      ax.plot(epoches, rmeans, label=f'rmeans{nhist}', color='orange')
    plt.savefig('figure.png')

  return mkdrv(m, config=_config(), matcher=match_only(), realizer=build_wrapper(_build))

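# Hedged sketch (illustrative): realize the analyzer with Pylightnix's
# in-place API, mirroring the `hello` example below. Assumes `breed_node` is
# a sibling stage defined elsewhere in this codebase and that matplotlib can
# write `figure.png` in a headless environment.
def run_summary() -> RRef:
  from pylightnix import instantiate_inplace, realize_inplace
  dref = instantiate_inplace(summarize_node)
  return realize_inplace(dref)
# rref2path(run_summary()) is the directory containing figure.png
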
def hello_realize(b: Build) -> None:
  c: Any = build_cattrs(b)
  o: Path = build_outpath(b)
  with TemporaryDirectory() as tmp:
    copytree(mklens(b).src.syspath, join(tmp, 'src'))
    dirrw(Path(join(tmp, 'src')))
    cwd = getcwd()
    try:
      chdir(join(tmp, 'src'))
      system('./configure --prefix=/usr')
      system('make')
      system(f'make install DESTDIR={o}')
    finally:
      chdir(cwd)

from pylightnix import mkdrv, build_wrapper, match_only

hello:DRef = \
  instantiate_inplace(mkdrv, hello_config(), match_only(), build_wrapper(hello_realize))
print(hello)

rref: RRef = realize_inplace(hello)
print(rref)

hello_bin = join(rref2path(rref), 'usr/bin/hello')
print(Popen([hello_bin], stdout=PIPE, shell=True).stdout.read())  # type:ignore

def stage_dataset2(m:Manager,
                   ref_inputs:DRef,
                   gather_depth:Optional[int]=999,
                   Wdef:int=5,
                   maxtime_sec:Optional[int]=None,
                   maxitems:Optional[int]=None)->DRef:
  lib_impl = lib_arith
  lib_methods = [mn.val for mn in lib_impl.keys()]
  assert (maxtime_sec is not None) or (maxitems is not None), \
    "At least one stop criterion should be defined"
  time2run_sec:int = maxsize if maxtime_sec is None else maxtime_sec
  maxitems:int = maxsize if maxitems is None else maxitems

  def _config():
    nonlocal Wdef, lib_methods, time2run_sec, maxitems, gather_depth
    num_inputs = mklens(ref_inputs).num_inputs.val
    batch_size = mklens(ref_inputs).batch_size.val
    name = f'dataset2-ni{num_inputs}-bs{batch_size}-Wd{Wdef}-mi{maxitems}'
    inputs = mklens(ref_inputs).out_inputs.refpath
    out_examples = [promise, 'examples.bin']
    version = ['21']
    return locals()

  def _make(b:Build):
    build_setoutpaths(b, 1)
    WLIB = mkwlib(lib_impl, Wdef)
    IMEMs = [json2imem(j) for j in readjson(mklens(b).inputs.syspath)]
    print(f"Inputs: {IMEMs}")
    i = 0
    # acc:List[Expr] = []
    g = genexpr(WLIB, IMEMs)
    written_bytes = 0
    written_items = 0
    time_start = time()
    acci = set()
    hb = time()
    with open(mklens(b).out_examples.syspath, 'wb') as f:
      _add = examples2fd(f)
      while time() < time_start + mklens(b).time2run_sec.val and \
            written_items < mklens(b).maxitems.val:
        # gt0=time()
        r, mem, imems, exprw = next(g)
        # gt1=time()
        # print('gen time', gt1-gt0)
        assert isinstance(imems[0][r], IVal), f"{imems[0][r]}"
        i += 1
        gi = 0
        gexpr = gengather(r, mem)
        # gg0=time()
        exprs = []
        for expr in gexpr:
          exprs.append(expr)
        if gather_depth is not None:
          exprs = exprs[-gather_depth:]
        else:
          exprs = [exprs[randint(0, len(exprs)-1)]]
        for expr in exprs:
          er = extrefs(expr)
          ds = decls(expr)
          # acc.append(expr)
          for j in range(len(IMEMs)):
            if len(ds) > 0:
              # print(gi, j, list(inps.keys()), print_expr(expr))
              inps:IMem = TMap({k: imems[j][k] for k in er})
              acci |= set(inps.values())
              written_bytes += _add(Example(inps, expr, imems[j][r]))
              written_items += 1
              hb2 = time()
              if written_items % 100 == 0:
                print(f".. NW {written_items} W {exprw[r]} DEP {depth(expr)} "
                      f"LAST_REF {r} WRBYTES {written_bytes // (1024)}K "
                      f"INPSZ {len(acci)} TIME {hb2-hb} "
                      f"VM {virtual_memory().used // 1024 // 1024}M")
                hb = hb2
          gi += 1
        # gg1=time()
        # print('gather time', gg1-gg0)

  return mkdrv(m, mkconfig(_config()), match_only(), build_wrapper(_make))

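# Hedged sketch (illustrative, not from the original source): build dataset2
# directly, bounding generation by item count rather than wall time. The
# helper name `run_dataset2_direct` is hypothetical; the `run_dataset2` used
# by stage_datasetS above is a separate function defined elsewhere.
def run_dataset2_direct(maxitems:int=100000) -> RRef:
  from pylightnix import instantiate, realize
  def _pipeline(m: Manager) -> DRef:
    ref_inputs = stage_inputs(m, num_inputs=4, batch_size=5, index=1)
    return stage_dataset2(m, ref_inputs, maxitems=maxitems)
  return realize(instantiate(_pipeline))
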