def _demodata_files(dpath=None, num_files=10, pool_size=3, size_pool=None):
    def _random_data(rng, num):
        return ''.join([rng.choice(string.hexdigits) for _ in range(num)])

    def _write_random_file(dpath, part_pool, size_pool, rng):
        namesize = 16
        # Choose 1, 4, or 16 parts of data
        num_parts = rng.choice(size_pool)
        chunks = [rng.choice(part_pool) for _ in range(num_parts)]
        contents = ''.join(chunks)
        fname_noext = _random_data(rng, namesize)
        ext = ub.hash_data(contents)[0:4]
        fname = '{}.{}'.format(fname_noext, ext)
        fpath = join(dpath, fname)
        with open(fpath, 'w') as file:
            file.write(contents)
        return fpath

    if size_pool is None:
        size_pool = [1, 4, 16]
    if dpath is None:
        dpath = ub.ensure_app_cache_dir('pfile/random')
    rng = random.Random(0)

    # Create a pool of random chunks of data
    chunksize = 65536
    part_pool = [_random_data(rng, chunksize) for _ in range(pool_size)]

    # Write `num_files` random files that have a reasonable collision
    # probability
    fpaths = [
        _write_random_file(dpath, part_pool, size_pool, rng)
        for _ in ub.ProgIter(range(num_files), desc='write files')
    ]
    for fpath in fpaths:
        assert exists(fpath)
    return fpaths
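# Hedged usage sketch for _demodata_files (the helper name `_demo...usage`
# is hypothetical; it relies only on behavior visible in the function above):
# because every file draws its chunks from a small shared part pool,
# duplicate file contents are likely, which makes this fixture useful for
# exercising content-hash collision handling.
def _demo_demodata_files_usage():
    fpaths = _demodata_files(num_files=10, pool_size=3)
    contents = [open(fpath).read() for fpath in fpaths]
    print('unique contents: {} of {}'.format(len(set(contents)), len(fpaths)))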
def test_recursive_eager_autogen():
    """
    xdoctest ~/code/mkinit/tests/test_with_dummy.py test_recursive_eager_autogen
    """
    import mkinit

    cache_dpath = ub.ensure_app_cache_dir("mkinit/tests")
    paths = make_dummy_package(cache_dpath,
                               pkgname="mkinit_rec_eager_autogen",
                               side_effects=True)
    pkg_path = paths["root"]
    mkinit.autogen_init(pkg_path, options={"lazy_import": 0}, dry=False,
                        recursive=True)

    with ub.util_import.PythonPathContext(cache_dpath):
        import mkinit_rec_eager_autogen
        print("mkinit_rec_eager_autogen = {!r}".format(
            mkinit_rec_eager_autogen))
        print("mkinit_rec_eager_autogen.good_attr_01 = {!r}".format(
            mkinit_rec_eager_autogen.good_attr_01))
        print("mkinit_rec_eager_autogen.a_very_nested_function = {!r}".format(
            mkinit_rec_eager_autogen.a_very_nested_function))
        mkinit_rec_eager_autogen.a_very_nested_function()
def test_restart_lr():
    size = 3
    datasets = {
        'train': nh.data.ToyData2d(size=size, border=1, n=256, rng=0),
        'vali': nh.data.ToyData2d(size=size, border=1, n=128, rng=1),
    }

    lr = 1.0

    hyper = {
        # --- data first
        'datasets'    : datasets,
        'nice'        : 'restart_lr',
        'workdir'     : ub.ensure_app_cache_dir('netharn/test/restart_lr'),
        'loaders'     : {'batch_size': 64},
        'xpu'         : nh.XPU.cast('cpu'),
        # --- algorithm second
        'model'       : (nh.models.ToyNet2d, {}),
        'optimizer'   : (nh.optimizers.SGD, {'lr': 99}),
        'criterion'   : (nh.criterions.FocalLoss, {}),
        'initializer' : (nh.initializers.NoOp, {}),
        'scheduler'   : (nh.schedulers.ListedLR, {
            'points': {
                0:  lr * 0.10,
                1:  lr * 1.00,
                9:  lr * 1.10,
                10: lr * 0.10,
                13: lr * 0.01,
            },
            'interpolate': True
        }),
        'dynamics'    : {'batch_step': 4},
        'monitor'     : (nh.Monitor, {'max_epoch': 13}),
    }
    harn = MyHarn(hyper=hyper)
    harn.config['use_tqdm'] = 0

    # Delete previous data
    harn.initialize(reset='delete')

    # Cause the harness to fail
    try:
        harn.failpoint = 5
        harn.run()
    except Failpoint:
        pass
    failpoint_lrs = harn._current_lrs()

    # Restarting the harness should begin at the same point
    harn = MyHarn(hyper=hyper)
    harn.config['use_tqdm'] = 0
    harn.initialize()

    restart_lrs = harn._current_lrs()
    print('failpoint_lrs = {!r}'.format(failpoint_lrs))
    print('restart_lrs = {!r}'.format(restart_lrs))

    harn.failpoint = None
    harn.run()

    assert restart_lrs == failpoint_lrs
def initial_weights():
    """
    Weights pretrained on ImageNet by yolo9000-pytorch
    """
    import os
    url = 'https://data.kitware.com/api/v1/file/5ab513438d777f068578f1d0/download'
    dpath = ub.ensure_app_cache_dir('clab/yolo_v2')
    fname = 'darknet19.weights.npz'
    dest = os.path.join(dpath, fname)
    if not os.path.exists(dest):
        command = 'curl -X GET {} > {}'.format(url, dest)
        ub.cmd(command, verbout=1, shell=True)
    # url = 'http://acidalia.kitware.com:8000/weights/darknet19.weights.npz'
    # npz_fpath = ub.grabdata(url, dpath=ub.ensure_app_cache_dir('clab'))

    # convert to torch weights
    npz_fpath = dest
    torch_fpath = ub.augpath(npz_fpath, ext='.pt')
    if not os.path.exists(torch_fpath):
        # hack to transform initial state
        model = Darknet19(num_classes=20)
        model.load_from_npz(npz_fpath, num_conv=18)
        torch.save(model.state_dict(), torch_fpath)

    # from clab import xpu_device
    # xpu = xpu_device.XPU('gpu')
    # xpu.load(torch_fpath)
    # torch.load(torch_fpath)
    return torch_fpath
def _demodata_toy_sesssion(workdir, name='demo_session', lr=1e-4):
    """
    workdir = ub.ensure_app_cache_dir('netharn/tests/sessions')
    """
    # This will train a toy model with toy data using netharn
    import netharn as nh
    hyper = nh.HyperParams(**{
        'workdir'     : workdir,
        'name'        : name,
        'xpu'         : nh.XPU.coerce('cpu'),
        'datasets'    : {
            'train': nh.data.ToyData2d(size=3, rng=0),
            'vali': nh.data.ToyData2d(size=3, rng=0),
        },
        'loaders'     : {'batch_size': 64},
        'model'       : (nh.models.ToyNet2d, {}),
        'optimizer'   : (nh.optimizers.SGD, {'lr': lr}),
        'criterion'   : (nh.criterions.FocalLoss, {}),
        'initializer' : (nh.initializers.KaimingNormal, {}),
        'monitor'     : (nh.Monitor, {'max_epoch': 1}),
    })
    harn = nh.FitHarn(hyper)
    harn.preferences['use_tensorboard'] = False
    harn.preferences['timeout'] = 1
    harn.run()  # TODO: make this run faster if we don't need to rerun
def test_download_bad_url():
    """
    Check that we error when the url is bad

    CommandLine:
        python -m ubelt.tests.test_download test_download_bad_url --verbose
    """
    url = 'http://a-very-incorrect-url'

    if not ub.argflag('--network'):
        pytest.skip('not running network tests')

    dpath = ub.ensure_app_cache_dir('ubelt', 'tests')
    fname = basename(url)
    fpath = join(dpath, fname)

    ub.delete(fpath)
    assert not exists(fpath)

    # from ubelt.util_download import URLError
    import six
    if six.PY2:  # nocover
        from urllib2 import URLError  # NOQA
    else:
        from urllib.error import URLError  # NOQA

    with pytest.raises(URLError):
        ub.download(url, fpath=fpath, verbose=1)
def test_hash_file():
    fpath = join(ub.ensure_app_cache_dir('ubelt'), 'tmp.txt')
    ub.writeto(fpath, 'foobar')
    hashid1_a = ub.hash_file(fpath, hasher='sha512', hashlen=8,
                             stride=1, blocksize=1)
    hashid2_a = ub.hash_file(fpath, hasher='sha512', hashlen=8,
                             stride=2, blocksize=1)
    hashid1_b = ub.hash_file(fpath, hasher='sha512', hashlen=8,
                             stride=1, blocksize=10)
    hashid2_b = ub.hash_file(fpath, hasher='sha512', hashlen=8,
                             stride=2, blocksize=10)
    assert hashid1_a == hashid1_b
    assert hashid2_a != hashid2_b, 'blocksize matters when stride is > 1'
    assert hashid1_a != hashid2_a
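# Hedged sketch of the striding behavior the test above relies on. This
# mirrors ub.hash_file's documented semantics rather than its actual
# implementation: with stride=s and blocksize=b, one b-byte block is hashed
# and then (s - 1) * b bytes are skipped, so the bytes hashed depend on b
# whenever s > 1, but cover the whole file whenever s == 1.
def _strided_bytes(data, stride, blocksize):
    chunks = []
    pos = 0
    while pos < len(data):
        chunks.append(data[pos:pos + blocksize])
        pos += stride * blocksize  # skip ahead when stride > 1
    return b''.join(chunks)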
def load_cifar(key='cifar10', workdir=None, transform=None):
    """
    key = 'cifar10'
    load_cifar(key, workdir=None)
    """
    import torchvision
    import pickle
    import os
    if workdir is None:
        workdir = ub.ensure_app_cache_dir('netharn')

    if key == 'cifar10':
        DATASET = torchvision.datasets.CIFAR10
        dset = DATASET(root=workdir, download=True, transform=transform)
        meta_fpath = os.path.join(dset.root, dset.base_folder, 'batches.meta')
        with open(meta_fpath, 'rb') as file:
            meta_dict = pickle.load(file)
        dset.classes = meta_dict['label_names']
        # For some reason the torchvision objects don't have the label names
        # in the dataset. But the download directory will have them.
        # classes = [
        #     'airplane', 'automobile', 'bird', 'cat', 'deer', 'dog', 'frog',
        #     'horse', 'ship', 'truck',
        # ]
    elif key == 'cifar100':
        DATASET = torchvision.datasets.CIFAR100
        dset = DATASET(root=workdir, download=True, transform=transform)
        meta_fpath = os.path.join(dset.root, dset.base_folder, 'meta')
        with open(meta_fpath, 'rb') as file:
            meta_dict = pickle.load(file)
        dset.classes = meta_dict['fine_label_names']
    return dset
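# Hedged usage sketch for load_cifar (the demo function name is hypothetical;
# it relies only on what the function above returns — a torchvision dataset
# with a patched `classes` attribute):
def _demo_load_cifar_usage():
    dset = load_cifar('cifar10')
    print('num items = {}'.format(len(dset)))
    print('classes = {!r}'.format(dset.classes))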
def test_broken_link():
    """
    CommandLine:
        python -m ubelt.tests.test_links test_broken_link
    """
    dpath = ub.ensure_app_cache_dir('ubelt', 'test_broken_link')

    ub.delete(dpath, verbose=2)
    ub.ensuredir(dpath, verbose=2)
    util_links._dirstats(dpath)

    broken_fpath = join(dpath, 'broken_fpath.txt')
    broken_flink = join(dpath, 'broken_flink.txt')

    ub.touch(broken_fpath, verbose=2)
    util_links._dirstats(dpath)
    ub.symlink(broken_fpath, broken_flink, verbose=2)

    util_links._dirstats(dpath)
    ub.delete(broken_fpath, verbose=2)

    util_links._dirstats(dpath)

    # make sure I am sane that this is the correct check.
    can_symlink = util_links._can_symlink()
    print('can_symlink = {!r}'.format(can_symlink))
    if can_symlink:
        # normal behavior
        assert islink(broken_flink)
        assert not exists(broken_flink)
    else:
        # on windows hard links are essentially the same file.
        # there is no trace that it was actually a link.
        assert exists(broken_flink)
def test_cant_overwrite_file_with_symlink():
    if ub.WIN32:
        # Can't distinguish this case on windows
        pytest.skip()

    dpath = ub.ensure_app_cache_dir('ubelt',
                                    'test_cant_overwrite_file_with_symlink')
    ub.delete(dpath, verbose=2)
    ub.ensuredir(dpath, verbose=2)

    happy_fpath = join(dpath, 'happy_fpath.txt')
    happy_flink = join(dpath, 'happy_flink.txt')

    for verbose in [2, 1, 0]:
        print('=======')
        print('verbose = {!r}'.format(verbose))
        ub.delete(dpath, verbose=verbose)
        ub.ensuredir(dpath, verbose=verbose)
        ub.touch(happy_fpath, verbose=verbose)
        ub.touch(happy_flink)  # create a file where a link should be

        util_links._dirstats(dpath)
        with pytest.raises(FileExistsError):  # file exists error
            ub.symlink(happy_fpath, happy_flink, overwrite=False,
                       verbose=verbose)

        with pytest.raises(FileExistsError):  # file exists error
            ub.symlink(happy_fpath, happy_flink, overwrite=True,
                       verbose=verbose)
def test_download_bad_url():
    """
    Check that we error when the url is bad

    Notes:
        For some reason this can take a long time to realize there is no URL,
        even if the timeout is specified and fairly low.

    CommandLine:
        python tests/test_download.py test_download_bad_url --verbose
    """
    url = 'http://www.a-very-incorrect-url.gov/does_not_exist.txt'
    # if not ub.argflag('--network'):
    #     pytest.skip('not running network tests')

    # Ensure the opener exists
    import sys
    if sys.version_info[0] == 2:  # nocover
        # import urllib2 as urllib_x
        from urllib2 import URLError  # NOQA
    else:
        # import urllib.request as urllib_x
        from urllib.error import URLError  # NOQA
    # if urllib_x._opener is None:
    #     urllib_x.install_opener(urllib_x.build_opener())

    dpath = ub.ensure_app_cache_dir('ubelt', 'tests')
    fname = basename(url)
    fpath = join(dpath, fname)

    ub.delete(fpath)
    assert not exists(fpath)

    with pytest.raises(URLError):
        ub.download(url, fpath=fpath, verbose=1, timeout=1.0)
def test_grabdata_value_error():
    """
    Check that conflicting combinations of fname, fpath, dpath, and appname
    raise a ValueError.
    """
    # url = 'http://i.imgur.com/rqwaDag.png'
    # if not ub.argflag('--network'):
    #     pytest.skip('not running network tests')
    url = _demo_url()

    dpath = ub.ensure_app_cache_dir('ubelt')
    fname = basename(url)
    fpath = join(dpath, fname)

    with pytest.raises(ValueError):
        ub.grabdata(url, fname=fname, fpath=fpath, dpath=dpath)
    with pytest.raises(ValueError):
        ub.grabdata(url, fname=fname, fpath=fpath)
    with pytest.raises(ValueError):
        ub.grabdata(url, dpath=dpath, fpath=fpath)
    with pytest.raises(ValueError):
        ub.grabdata(url, fpath=fpath, appname='foobar')
    with pytest.raises(ValueError):
        ub.grabdata(url, dpath=dpath, appname='foobar')
def _demodata_trained_dpath():
    # This will train a toy model with toy data using netharn
    import netharn as nh
    hyper = nh.HyperParams(**{
        'workdir'     : ub.ensure_app_cache_dir('netharn/tests/deploy'),
        'nice'        : 'deploy_demo_static',
        'xpu'         : nh.XPU.cast('cpu'),
        'datasets'    : {'train': nh.data.ToyData2d(size=3, rng=0)},
        'loaders'     : {'batch_size': 64},
        'model'       : (nh.models.ToyNet2d, {}),
        'optimizer'   : (nh.optimizers.SGD, {'lr': 0.0001}),
        'criterion'   : (nh.criterions.FocalLoss, {}),
        'initializer' : (nh.initializers.KaimingNormal, {}),
        'monitor'     : (nh.Monitor, {'max_epoch': 1}),
    })
    harn = nh.FitHarn(hyper)
    harn.run()  # TODO: make this run faster if we don't need to rerun

    if len(list(glob.glob(join(harn.train_dpath, '*.py')))) > 1:
        # If multiple models are deployed some hash changed. Need to reset
        harn.initialize(reset='delete')
        harn.run()  # don't relearn if we already finished this one
    return harn.train_dpath
def test_lazy_import():
    """
    python ~/code/mkinit/tests/test_with_dummy.py test_lazy_import
    """
    import pytest
    import mkinit

    if sys.version_info[0:2] < (3, 7):
        pytest.skip('Only 3.7+ has lazy imports')

    cache_dpath = ub.ensure_app_cache_dir("mkinit/tests")
    paths = make_dummy_package(cache_dpath)
    pkg_path = paths["root"]
    mkinit.autogen_init(pkg_path, options={"lazy_import": 1}, dry=False,
                        recursive=True)

    dpath = dirname(paths["root"])
    with ub.util_import.PythonPathContext(dpath):
        import mkinit_dummy_module
        print("mkinit_dummy_module = {!r}".format(mkinit_dummy_module))
        print(dir(mkinit_dummy_module))
        print("mkinit_dummy_module.a_very_nested_function = {!r}".format(
            mkinit_dummy_module.a_very_nested_function))
        mkinit_dummy_module.a_very_nested_function()
def _demodata_toy_harn():
    # This will train a toy model with toy data using netharn
    import netharn as nh
    hyper = nh.HyperParams(**{
        'workdir'     : ub.ensure_app_cache_dir('torch_liberator/tests/deploy'),
        'name'        : 'demo_liberator_static',
        'xpu'         : nh.XPU.coerce('cpu'),
        'datasets'    : {'train': nh.data.ToyData2d(size=3, rng=0)},
        'loaders'     : {'batch_size': 64},
        'model'       : (nh.models.ToyNet2d, {}),
        'optimizer'   : (nh.optimizers.SGD, {'lr': 0.0001}),
        'criterion'   : (nh.criterions.FocalLoss, {}),
        'initializer' : (nh.initializers.KaimingNormal, {}),
        'monitor'     : (nh.Monitor, {'max_epoch': 1}),
    })
    harn = nh.FitHarn(hyper)
    harn.preferences['use_tensorboard'] = False
    harn.preferences['log_gradients'] = False
    harn.preferences['timeout'] = 1
    return harn
def __init__(self, fname, cfgstr=None, dpath=None, appname='ubelt',
             ext='.pkl', meta=None, verbose=None, enabled=True, log=None,
             protocol=2):
    import ubelt as ub
    if verbose is None:
        verbose = self.VERBOSE
    if dpath is None:  # pragma: no branch
        dpath = ub.ensure_app_cache_dir(appname)
    ub.ensuredir(dpath)
    self.dpath = dpath
    self.fname = fname
    self.cfgstr = cfgstr
    self.verbose = verbose
    self.ext = ext
    self.meta = meta
    self.enabled = enabled
    self.protocol = protocol
    self.log = print if log is None else log

    if len(self.ext) > 0 and self.ext[0] != '.':
        raise ValueError('Please be explicit and use a dot in ext')
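# Hedged usage sketch: the __init__ above matches the signature of ubelt's
# Cacher, so presumably the surrounding class is used like the real
# ub.Cacher shown here (tryload/save are ubelt's documented API, not
# verified against the class this method was extracted from; the appname
# and payload are placeholders).
import ubelt as ub

cacher = ub.Cacher('demo_process', cfgstr='params-v1', appname='demo')
data = cacher.tryload()  # returns None on a cache miss
if data is None:
    data = {'result': 42}  # stand-in for an expensive computation
    cacher.save(data)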
def test_globstr_with_nargs():
    from os.path import join
    import ubelt as ub
    import scriptconfig as scfg
    dpath = ub.ensure_app_cache_dir('scriptconfig/tests/files')
    ub.touch(join(dpath, 'file1.txt'))
    ub.touch(join(dpath, 'file2.txt'))
    ub.touch(join(dpath, 'file3.txt'))

    class TestConfig(scfg.Config):
        default = {
            'paths': scfg.Value(None, nargs='+'),
        }

    cmdline = '--paths {dpath}/*'.format(dpath=dpath)
    config = TestConfig(cmdline=cmdline)
    # ub.cmd(f'echo {dpath}/*', shell=True)

    import glob
    cmdline = '--paths ' + ' '.join(list(glob.glob(join(dpath, '*'))))
    config = TestConfig(cmdline=cmdline)

    cmdline = '--paths=' + ','.join(list(glob.glob(join(dpath, '*'))))
    config = TestConfig(cmdline=cmdline)
def test_overwrite_symlink():
    """
    CommandLine:
        python ~/code/ubelt/tests/test_links.py test_overwrite_symlink
    """
    # TODO: test that we handle broken links
    dpath = ub.ensure_app_cache_dir('ubelt', 'test_overwrite_symlink')
    ub.delete(dpath, verbose=2)
    ub.ensuredir(dpath, verbose=2)

    happy_fpath = join(dpath, 'happy_fpath.txt')
    other_fpath = join(dpath, 'other_fpath.txt')
    happy_flink = join(dpath, 'happy_flink.txt')

    for verbose in [2, 1, 0]:
        print('@==========@')
        print('verbose = {!r}'.format(verbose))

        print('[test] Setup')
        ub.delete(dpath, verbose=verbose)
        ub.ensuredir(dpath, verbose=verbose)
        ub.touch(happy_fpath, verbose=verbose)
        ub.touch(other_fpath, verbose=verbose)

        print('[test] Dirstats dpath')
        util_links._dirstats(dpath)

        print('[test] Create initial link (to happy)')
        ub.symlink(happy_fpath, happy_flink, verbose=verbose)

        print('[test] Dirstats dpath')
        util_links._dirstats(dpath)

        # Creating a duplicate link
        print('[test] Create a duplicate link (to happy)')
        ub.symlink(happy_fpath, happy_flink, verbose=verbose)

        print('[test] Dirstats dpath')
        util_links._dirstats(dpath)

        print('[test] Create an unauthorized overwrite link (to other)')
        with pytest.raises(Exception) as exc_info:  # file exists error
            ub.symlink(other_fpath, happy_flink, verbose=verbose)
        print(' * exc_info = {!r}'.format(exc_info))

        print('[test] Create an authorized overwrite link (to other)')
        ub.symlink(other_fpath, happy_flink, verbose=verbose, overwrite=True)

        print('[test] Dirstats dpath')
        ub.delete(other_fpath, verbose=verbose)

        print('[test] Create an unauthorized overwrite link (back to happy)')
        with pytest.raises(Exception) as exc_info:  # file exists error
            ub.symlink(happy_fpath, happy_flink, verbose=verbose)
        print(' * exc_info = {!r}'.format(exc_info))

        print('[test] Create an authorized overwrite link (back to happy)')
        ub.symlink(happy_fpath, happy_flink, verbose=verbose, overwrite=True)
def main():
    try:
        script = __file__
    except NameError:
        raise
        # for Ipython hacking
        script = ub.expandpath(
            '~/code/ndsampler/dev/devcheck_python23_isect_index_cache.py')

    # py2 = ub.find_exe('python2')
    # py3 = ub.find_exe('python3')
    # ub.cmd([py2, script, 'load_regions'], shell=True)
    # ub.cmd([py3, script, 'save_regions'], shell=True)

    # Register scripts for activating python 2/3 virtual envs that have
    # ndsampler installed
    import getpass
    username = getpass.getuser()
    if username in ['joncrall', 'jon.crall']:
        # Hack for Jon's computer
        activate_cmds = {
            'python2': 'we py2.7',
            'python3': 'we venv3.6',
        }
    else:
        assert False, 'need to customize activation scripts for your machine'
        activate_cmds = {
            'python2': 'source ~/venv27/bin/activate',
            'python3': 'conda activate py36',
        }

    def run(py):
        bash_cmd = ' && '.join([
            'source $HOME/.bashrc',
            activate_cmds[py],
            'python {} access_cache'.format(script),
        ])
        sh_cmd = 'bash -c "{}"'.format(bash_cmd)
        info = ub.cmd(sh_cmd, shell=True, verbose=3)
        return info

    workdir = ub.ensure_app_cache_dir('ndsampler', 'tests', '23_regions')

    # Save in python3, load in python2
    print('\n\n--- SAVE Python3, LOAD Python2 ---')
    ub.delete(workdir, verbose=1)
    info = run('python3')  # NOQA
    assert info['ret'] == 0
    info = run('python2')  # NOQA
    assert info['ret'] == 0

    print('\n\n--- SAVE Python2, LOAD Python3 ---')
    ub.delete(workdir, verbose=1)  # Clear the cache
    info = run('python2')  # NOQA
    assert info['ret'] == 0
    info = run('python3')  # NOQA
    assert info['ret'] == 0
def test_touch():
    import ubelt as ub
    dpath = ub.ensure_app_cache_dir('ubelt')
    fpath = join(dpath, 'touch_file')
    assert not exists(fpath)
    ub.touch(fpath, verbose=True)
    assert exists(fpath)
    os.unlink(fpath)
def 取缓存目录(name):
    """
    Get the shrunk cache directory for ``name``. (The function name is
    Chinese for "get cache directory".)

    # The resource root directory is
    # ~/AppData/Roaming on Windows,
    # ~/.config on Linux and ~/Library/Application Support on Mac.
    # The cache root directory is ~/AppData/Local on Windows,
    # ~/.cache on Linux and ~/Library/Caches on Mac.
    """
    return ub.shrinkuser(ub.ensure_app_cache_dir(name))
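# Hedged example of what the helper above returns, assuming a Linux machine,
# ubelt's documented cache layout, and a hypothetical 'demo' appname:
# ub.ensure_app_cache_dir creates ~/.cache/demo, and ub.shrinkuser replaces
# the expanded home prefix with '~'.
def _demo_cache_dir_usage():
    dpath = 取缓存目录('demo')
    print(dpath)  # e.g. '~/.cache/demo' on Linux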
def test_ensuredir_recreate():
    base = ub.ensure_app_cache_dir('ubelt/tests')
    folder = join(base, 'foo')
    member = join(folder, 'bar')
    ub.ensuredir(folder, recreate=True)
    ub.ensuredir(member)
    assert exists(member)
    ub.ensuredir(folder, recreate=True)
    assert not exists(member)
def _create_test_filesystem():
    dpath = ub.ensure_app_cache_dir('xdev/test_search_replace')
    text1 = ub.paragraph(
        '''
        Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do
        eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim
        ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut
        aliquip ex ea commodo consequat. Duis aute irure dolor in
        reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla
        pariatur. Excepteur sint occaecat cupidatat non proident, sunt in
        culpa qui officia deserunt mollit anim id est laborum.
        ''')

    text2 = ub.codeblock(
        '''
        def fib(n):
            a, b = 0, 1
            while a < n:
                print(a, end=' ')
                a, b = b, a+b
            print()

        fib(1000)
        ''')

    text3 = ub.codeblock(
        '''
        This file contains Lorem and fib

        Newlines

        fib
        lorem
        fib
        ''')

    text4 = ''

    fpath1 = join(dpath, 'lorium.txt')
    fpath2 = join(dpath, 'fib.py')
    fpath3 = join(dpath, 'foo.txt')
    fpath4 = join(ub.ensuredir((dpath, 'subdir')), 'foo.txt')
    with open(fpath1, 'w') as file:
        file.write(text1)
    with open(fpath2, 'w') as file:
        file.write(text2)
    with open(fpath3, 'w') as file:
        file.write(text3)
    with open(fpath4, 'w') as file:
        file.write(text4)

    info = {
        'root': dpath,
        'contents': [fpath1, fpath2, fpath3],
    }
    return info
def test_monkeypatch_open():
    import ubelt as ub
    from os.path import join
    code1 = ub.codeblock(
        '''
        import builtins

        def _make_dummy(x):
            def _dummy_open(x):
                return x
            _dummy_open.__name__ = x
            return _dummy_open

        if 0:
            builtins.open = _make_dummy('overwrite-builtins-open-preimport')

        import mod2

        mod2.check_relevant()

        if 1:
            builtins.open = _make_dummy('overwrite-builtins-open')
            mod2.check_relevant()

        if 1:
            mod2.open = _make_dummy('overwrite-mod2-open')
            mod2.check_relevant()
        ''')
    code2 = ub.codeblock(
        '''
        attr1 = 'foo'
        print(open)

        def check_relevant():
            print('--- MOD 2 VARS ---')
            print('attr1 = {!r}'.format(attr1))
            print('open = {!r}'.format(open))
            print('__builtins__["open"] = {!r}'.format(__builtins__['open']))
            print('globals()["open"] = {!r}'.format(globals().get('open', None)))
            open_result = open('foo')
            print('open_result = {!r}'.format(open_result))
        ''')
    dpath = ub.ensure_app_cache_dir('ubelt', 'test_monkeypatch')
    mod1_fpath = join(dpath, 'mod1.py')
    mod2_fpath = join(dpath, 'mod2.py')
    with open(mod1_fpath, 'w') as file:
        file.write(code1)
    with open(mod2_fpath, 'w') as file:
        file.write(code2)

    info = ub.cmd('python mod1.py', cwd=dpath, verbose=3)
def test_readwrite():
    import ubelt as ub
    dpath = ub.ensure_app_cache_dir('ubelt')
    fpath = dpath + '/' + 'testwrite.txt'
    if exists(fpath):
        os.remove(fpath)
    to_write = 'utf-8 symbols Δ, Й, ק, م, ๗, あ, 叶, 葉, and 말.'
    ub.writeto(fpath, to_write, verbose=True)
    read_ = ub.readfrom(fpath, verbose=True)
    assert read_ == to_write
def test_grabdata_fname_only():
    url = 'http://i.imgur.com/rqwaDag.png'
    dpath = ub.ensure_app_cache_dir('ubelt')
    fname = 'mario.png'
    fpath = join(dpath, fname)
    got_fpath = ub.grabdata(url, fname=fname)
    assert got_fpath == fpath
    assert exists(fpath)
def test_grabdata_dpath_only():
    url = 'http://i.imgur.com/rqwaDag.png'
    dpath = ub.ensure_app_cache_dir('ubelt', 'test')
    fname = basename(url)
    fpath = join(dpath, fname)
    got_fpath = ub.grabdata(url, dpath=dpath)
    assert got_fpath == fpath
    assert exists(fpath)
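# Hedged companion example for the grabdata tests above: ub.grabdata can also
# verify a download against a known hash via its documented hash_prefix and
# hasher parameters. The expected_prefix argument is a placeholder the caller
# must supply; it is not the real hash of this image.
def _demo_grabdata_with_hash(expected_prefix):
    url = 'http://i.imgur.com/rqwaDag.png'
    # Raises an error if the downloaded file's sha512 does not start with
    # expected_prefix, so stale or corrupted cache entries get re-fetched.
    fpath = ub.grabdata(url, hash_prefix=expected_prefix, hasher='sha512')
    return fpath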
def test_lr():
    size = 3
    datasets = {
        'train': nh.data.ToyData2d(size=size, border=1, n=256, rng=0),
        # 'vali': nh.data.ToyData2d(size=size, border=1, n=128, rng=1),
    }

    lr = 1.0

    hyper = {
        # --- data first
        'datasets'    : datasets,
        'nice'        : 'restart_lr',
        'workdir'     : ub.ensure_app_cache_dir('netharn/test/restart_lr'),
        'loaders'     : {'batch_size': 64},
        'xpu'         : nh.XPU.cast('cpu'),
        # --- algorithm second
        'model'       : (nh.models.ToyNet2d, {}),
        'optimizer'   : (nh.optimizers.SGD, {'lr': 99}),
        'criterion'   : (nh.criterions.FocalLoss, {}),
        'initializer' : (nh.initializers.NoOp, {}),
        'scheduler'   : (nh.schedulers.ListedLR, {
            'points': {
                0:  lr * 0.10,
                1:  lr * 1.00,
                10: lr * 0.10,
                13: lr * 0.01,
            },
            'interpolate': False,
        }),
        'dynamics'    : {'batch_step': 4},
        'monitor'     : (nh.Monitor, {'max_epoch': 15}),
    }
    harn = MyHarn(hyper=hyper)
    harn.config['use_tqdm'] = 0

    # Delete previous data
    harn.initialize(reset='delete')

    harn.run()
    print(ub.repr2(harn.epoch_to_lr, nl=1))
def test_static_init():
    """
    python ~/code/mkinit/tests/test_with_dummy.py test_static_init
    """
    import mkinit
    cache_dpath = ub.ensure_app_cache_dir('mkinit/tests')
    paths = make_dummy_package(cache_dpath)
    modpath = paths['root']
    text = mkinit.static_init(modpath)
    check_dummy_root_init(text)
def test_static_find_locals():
    """
    python ~/code/mkinit/tests/test_with_dummy.py test_static_find_locals
    """
    import mkinit
    cache_dpath = ub.ensure_app_cache_dir('mkinit/tests')
    paths = make_dummy_package(cache_dpath)
    ub.delete(paths['root_init'])
    modpath = paths['root']
    imports = list(mkinit.static_mkinit._find_local_submodules(modpath))
    print('imports = {!r}'.format(imports))
def run(self, dry=False):
    """
    Executes this pipeline. Writes a temporary pipeline file to your
    sprokit cache directory and calls the pipeline_runner.
    """
    cache_dir = ub.ensure_app_cache_dir('sprokit', 'temp_pipelines')
    # TODO: make a name based on a hash of the text to avoid race conditions
    pipe_fpath = join(cache_dir, 'temp_pipeline_file.pipe')
    self.write(pipe_fpath)
    run_pipe_file(pipe_fpath, dry=dry)
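# Hedged sketch of the fix the TODO above suggests: derive the temp filename
# from a hash of the pipeline text so that concurrent runs writing different
# pipelines do not clobber the same file. The pipeline_text parameter stands
# in for whatever text self.write() produces; this is not the actual sprokit
# implementation.
import ubelt as ub
from os.path import join

def _hashed_pipe_fpath(pipeline_text):
    cache_dir = ub.ensure_app_cache_dir('sprokit', 'temp_pipelines')
    suffix = ub.hash_data(pipeline_text)[0:16]  # short content-based name
    return join(cache_dir, 'pipeline_{}.pipe'.format(suffix))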
def demodata_input(dataset='demo'):
    """
    Specifies the input files for testing and demos
    """
    if dataset == 'demo':
        import zipfile
        from os.path import commonprefix
        dpath = ub.ensure_app_cache_dir('camtrawl')
        try:
            demodata_zip = ub.grabdata(
                'http://acidalia:8000/data/camtrawl_demodata.zip',
                dpath=dpath)
        except Exception:
            raise ValueError(
                'Demo data is currently only available on Kitware VPN')
        with zipfile.ZipFile(demodata_zip) as zfile:
            dname = commonprefix(zfile.namelist())
            data_fpath = join(dpath, dname)
            if not exists(data_fpath):
                zfile.extractall(dpath)

        cal_fpath = join(data_fpath, 'cal.npz')
        img_path1 = join(data_fpath, 'left')
        img_path2 = join(data_fpath, 'right')
    elif dataset == 'test':
        data_fpath = expanduser('~/data/autoprocess_test_set')
        cal_fpath = join(data_fpath, 'cal_201608.mat')
        img_path1 = join(data_fpath, 'image_data/left')
        img_path2 = join(data_fpath, 'image_data/right')
    elif dataset == 'haul83-small':
        data_fpath = expanduser('~/data/camtrawl_stereo_sample_data_small')
        cal_fpath = join(data_fpath,
                         '201608_calibration_data/selected/Camtrawl_2016.npz')
        img_path1 = join(data_fpath, 'Haul_83/left')
        img_path2 = join(data_fpath, 'Haul_83/right')
    elif dataset == 'haul83':
        data_fpath = expanduser('~/data/camtrawl_stereo_sample_data/')
        cal_fpath = join(data_fpath,
                         '201608_calibration_data/selected/Camtrawl_2016.npz')
        img_path1 = join(data_fpath, 'Haul_83/D20160709-T021759/images/AB-800GE_00-0C-DF-06-40-BF')  # left
        img_path2 = join(data_fpath, 'Haul_83/D20160709-T021759/images/AM-800GE_00-0C-DF-06-20-47')  # right
    else:
        raise ValueError('unknown dataset={!r}'.format(dataset))
    return img_path1, img_path2, cal_fpath