from os.path import isdir, join
from pylightnix import store_initialize

def test_store_initialize() -> None:
  with setup_storage('test_store_initialize') as p:
    import pylightnix.core
    try:
      # Point the global store locations at the sandbox directory
      pylightnix.core.PYLIGHTNIX_TMP = join(p, 'tmp')
      pylightnix.core.PYLIGHTNIX_STORE = join(p, 'store')
      store_initialize(custom_store=None, custom_tmp=None)
      assert isdir(join(p, 'tmp'))
      assert isdir(join(p, 'store'))
      # A second call must be idempotent and leave the directories in place
      store_initialize(custom_store=None, custom_tmp=None)
      assert isdir(join(p, 'tmp'))
      assert isdir(join(p, 'store'))
    finally:
      pylightnix.core.PYLIGHTNIX_TMP = None  # type:ignore
      pylightnix.core.PYLIGHTNIX_STORE = None  # type:ignore
from contextlib import contextmanager
from os import listdir
from os.path import join
from shutil import rmtree
from tempfile import gettempdir
from pylightnix import Path, store_initialize, dirchmod

@contextmanager
def setup_storage(tn: str):
  # Reset the STORE variables to prevent interaction with the production store
  import pylightnix.core
  pylightnix.core.PYLIGHTNIX_STORE = None  # type:ignore
  pylightnix.core.PYLIGHTNIX_TMP = None  # type:ignore
  storepath = Path(join(gettempdir(), tn))
  try:
    # Remove leftovers of a previous run, if any
    dirchmod(storepath, 'rw')
    rmtree(storepath)
  except FileNotFoundError:
    pass
  store_initialize(custom_store=storepath,
                   custom_tmp=join(gettempdir(), 'pylightnix_tmp'))
  assert 0 == len(listdir(storepath))
  try:
    yield storepath
  finally:
    pylightnix.core.PYLIGHTNIX_STORE = None  # type:ignore
    pylightnix.core.PYLIGHTNIX_TMP = None  # type:ignore
from pylightnix import Path, store_initialize, dirrm

# Start from scratch: remove any previous demo store and initialize a new one
dirrm(Path('/tmp/pylightnix_mnist_demo'))
store_initialize(custom_store='/tmp/pylightnix_mnist_demo', custom_tmp='/tmp')

from pylightnix import DRef, instantiate_inplace, fetchurl

# Stage 1: declare a derivation which downloads the MNIST dataset
mnist_dataset:DRef = \
  instantiate_inplace(
    fetchurl,
    name='mnist',
    mode='as-is',
    url='https://storage.googleapis.com/tensorflow/tf-keras-datasets/mnist.npz',
    sha256='731c5ac602752760c8e48fbffcf8c3b850d9dc2a2aedcf2cc48468fc17b673d1')

print(mnist_dataset)

from pylightnix import Config, RefPath, PromisePath, mkconfig, promise

# Stage 2: the trainer's configuration refers to the dataset derivation
# and promises to produce an accuracy report
def mnist_config()->Config:
  learning_rate = 1e-3
  num_epoches = 1
  dataset:RefPath = [mnist_dataset, 'mnist.npz']
  accuracy:PromisePath = [promise, 'accuracy.txt']
  return mkconfig(locals())

from pylightnix import match_latest

def mnist_match():
  return match_latest()
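# A hedged sketch of the next step, assuming the in-place API also exports
# `realize_inplace` and that `rref2path` resolves a realization reference to
# its storage directory: realizing the derivation performs the actual download.
from os.path import join
from pylightnix import RRef, realize_inplace, rref2path

mnist_rref:RRef = realize_inplace(mnist_dataset)
print(mnist_rref)
# The realization directory should now contain the downloaded mnist.npz
print(join(rref2path(mnist_rref), 'mnist.npz'))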
from os.path import join
from numpy import load as np_load
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Conv2D, MaxPool2D, Dropout, Flatten, Dense
from tensorflow.keras.utils import to_categorical
from tensorflow.keras.backend import image_data_format
from tensorflow.keras.callbacks import ModelCheckpoint

from pylightnix import (
    Matcher, Build, Path, RefPath, Config, Manager, RRef, DRef, Context,
    build_path, build_outpath, build_cattrs, mkdrv, rref2path, mkconfig,
    mkbuild, match_best, build_wrapper_, tryread, fetchurl, store_initialize,
    realize, instantiate )

from typing import Any

store_initialize()

def fetchmnist(m:Manager)->DRef:
  return fetchurl(
    m,
    name='mnist',
    mode='as-is',
    url='https://storage.googleapis.com/tensorflow/tf-keras-datasets/mnist.npz',
    sha256='731c5ac602752760c8e48fbffcf8c3b850d9dc2a2aedcf2cc48468fc17b673d1')

# A Build subclass which carries the Keras model and the dataset between
# the phases of the build process
class Model(Build):
  model:Sequential
  x_train:Any
  y_train:Any
  x_test:Any
  y_test:Any
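# A hypothetical continuation sketch. It assumes that `build_wrapper_(f, ctr)`
# pairs a one-argument build function with the `Model` constructor and that
# `match_best('accuracy.txt')` picks the realization with the best recorded
# accuracy; model construction and training themselves are elided.
def mnist_train(b:Model)->None:
  c = build_cattrs(b)
  # Resolve the dataset RefPath into a real filesystem path and load it
  with np_load(build_path(b, c.dataset), allow_pickle=True) as f:
    b.x_train, b.y_train = f['x_train'], f['y_train']
    b.x_test, b.y_test = f['x_test'], f['y_test']
  ...  # build `b.model`, train it, write 'accuracy.txt' into build_outpath(b)

def model(m:Manager)->DRef:
  config = mkconfig({'dataset': [fetchmnist(m), 'mnist.npz'],
                     'num_epoches': 1})
  return mkdrv(m, config, match_best('accuracy.txt'),
               build_wrapper_(mnist_train, Model))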
from ultimatum.base import run1

from pylightnix import (
    Config, Manager, Build, DRef, RRef, ConfigAttrs, mkdrv, instantiate,
    realizeMany, build_cattrs, build_wrapper, match_all, build_outpaths, Path,
    config_dict, build_config, store_initialize, match_only, build_paths,
    build_outpath, realize )

import matplotlib.pyplot as plt
from multiprocessing.pool import Pool
from typing import List, Optional
from json import loads as json_loads, dumps as json_dumps
from os import chdir

store_initialize('/tmp/ultimatum', '/tmp')

def _build_process(a:ConfigAttrs, o:Path):
  run1(cwd=o, nepoch=a.nepoch, n=a.n, nrounds=a.nrounds, cutoff=a.cutoff)

def breed_node(m:Manager)->DRef:
  def _config()->Config:
    name='ultimatum'
    nepoch=30000
    n=300
    nrounds=10*30
    cutoff=0.1
    version=6
    nrunners=10
    return Config(locals())
  def _build(b:Build)->None:
    c=build_cattrs(b)
    # Run `nrunners` simulations in parallel, one per output path
    p=Pool()
    p.starmap(_build_process,
              [(c,o) for o in build_outpaths(b, nouts=c.nrunners)], 1)
  # Assumed completion (the original snippet breaks off here): register the
  # derivation, keeping every realization via `match_all`
  return mkdrv(m, _config(), match_all(), build_wrapper(_build))
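# A hedged usage sketch: instantiate the stage and realize it, collecting one
# realization reference per runner.
clo = instantiate(breed_node)
rrefs:List[RRef] = realizeMany(clo)
print(rrefs)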