def test_download_bad_url():
    """
    Check that we error when the url is bad

    CommandLine:
        python -m ubelt.tests.test_download test_download_bad_url --verbose
    """
    url = 'http://a-very-incorrect-url'
    if not ub.argflag('--network'):
        pytest.skip('not running network tests')

    dpath = ub.ensure_app_cache_dir('ubelt', 'tests')
    fname = basename(url)
    fpath = join(dpath, fname)

    ub.delete(fpath)
    assert not exists(fpath)

    # from ubelt.util_download import URLError
    import six
    if six.PY2:  # nocover
        from urllib2 import URLError  # NOQA
    else:
        from urllib.error import URLError  # NOQA

    with pytest.raises(URLError):
        ub.download(url, fpath=fpath, verbose=1)
def make_lowres_scene(task, scene):
    scene_gtfull_dpath = task.datasubdir('gt' + 'full', scene)
    scene_imfull_dpath = task.datasubdir('im' + 'full', scene)

    gt_fpaths = sorted(glob.glob(join(scene_gtfull_dpath, '*.png')))
    im_fpaths = sorted(glob.glob(join(scene_imfull_dpath, '*.png')))

    # Define the output path for this preprocessing mode
    mode = 'lowres'
    scene_gtout_dpath = task.datasubdir('gt' + mode, scene)
    scene_imout_dpath = task.datasubdir('im' + mode, scene)

    # Start fresh. Remove existing files
    ub.delete(scene_gtout_dpath, verbose=False)
    ub.delete(scene_imout_dpath, verbose=False)
    ub.ensuredir(scene_gtout_dpath)
    ub.ensuredir(scene_imout_dpath)

    target_dsize = tuple(task.input_shape[::-1])
    for impath, gtpath in zip(im_fpaths, gt_fpaths):
        im = cv2.imread(impath, flags=cv2.IMREAD_UNCHANGED)
        gt = cv2.imread(gtpath, flags=cv2.IMREAD_UNCHANGED)

        im_lowres = cv2.resize(im, target_dsize,
                               interpolation=cv2.INTER_LANCZOS4)
        gt_lowres = cv2.resize(gt, target_dsize,
                               interpolation=cv2.INTER_NEAREST)

        fname = basename(impath)
        cv2.imwrite(join(scene_imout_dpath, fname), im_lowres)
        cv2.imwrite(join(scene_gtout_dpath, fname), gt_lowres)

    return scene_imout_dpath, scene_gtout_dpath
def _convert_to_cog_worker(src_fpath, dst_fpath, cog_config):
    """ worker function """
    if not exists(dst_fpath):
        _cli_convert_cloud_optimized_geotiff(src_fpath, dst_fpath,
                                             **cog_config)

    success = False
    max_tries = 3
    for try_num in range(max_tries):
        info = _validate_cog_worker(dst_fpath=dst_fpath, orig_fpath=src_fpath)
        if info['status'] == 'pass':
            success = True
            break
        else:
            print('ATTEMPT TO RECOVER FROM ERROR info = {!r}'.format(info))
            print('src_fpath = {!r}'.format(src_fpath))
            print('dst_fpath = {!r}'.format(dst_fpath))
            ub.delete(dst_fpath)
            _cli_convert_cloud_optimized_geotiff(src_fpath, dst_fpath,
                                                 **cog_config)

    if not success:
        raise Exception(
            'ERROR CONVERTING TO COG: src_fpath={}, dst_fpath={}'.format(
                src_fpath, dst_fpath))
    return dst_fpath
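# A minimal usage sketch for the convert-validate-retry worker above. The
# paths here are hypothetical, and the cog_config keys ('compress',
# 'blocksize') are illustrative assumptions rather than a confirmed API of
# _cli_convert_cloud_optimized_geotiff, which (like _validate_cog_worker)
# is assumed to come from the surrounding module.
def _example_cog_conversion():
    cog_config = {'compress': 'JPEG', 'blocksize': 256}  # assumed keys
    dst = _convert_to_cog_worker('/data/imgs/tile_0001.tif',
                                 '/data/cogs/tile_0001.cog.tif',
                                 cog_config)
    # On success the destination path of the validated COG is returned
    print('converted and validated: {!r}'.format(dst))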
def stage_self(ROOT, staging_dpath):
    import shutil
    # stage the important files in this repo
    dist_paths = [
        'pyhesaff', 'src', 'CMakeLists.txt', 'setup.py', 'run_doctests.sh',
        'CMake', 'run_tests.py'
    ]
    from os.path import isfile, exists

    def copy3(src, dst):
        # like shutil.copy2, but overwrites an existing destination file
        if exists(dst) and isfile(dst):
            os.unlink(dst)
        shutil.copy2(src, dst)

    mirror_dpath = ub.ensuredir((staging_dpath, 'hesaff'))

    copy_function = copy3
    print('======')
    for pname in dist_paths:
        src = join(ROOT, pname)
        dst = join(mirror_dpath, pname)
        print('src={!r}, dst={!r}'.format(src, dst))
        if os.path.isdir(src):  # check the absolute source path, not pname
            ub.delete(dst)
            shutil.copytree(src, dst, copy_function=copy_function)
        else:
            copy_function(src, dst)
    print('======')
def test_broken_link():
    """
    CommandLine:
        python -m ubelt.tests.test_links test_broken_link
    """
    dpath = ub.ensure_app_cache_dir('ubelt', 'test_broken_link')
    ub.delete(dpath, verbose=2)
    ub.ensuredir(dpath, verbose=2)
    util_links._dirstats(dpath)

    broken_fpath = join(dpath, 'broken_fpath.txt')
    broken_flink = join(dpath, 'broken_flink.txt')

    ub.touch(broken_fpath, verbose=2)
    util_links._dirstats(dpath)
    ub.symlink(broken_fpath, broken_flink, verbose=2)

    util_links._dirstats(dpath)
    ub.delete(broken_fpath, verbose=2)
    util_links._dirstats(dpath)

    # make sure I am sane that this is the correct check.
    can_symlink = util_links._can_symlink()
    print('can_symlink = {!r}'.format(can_symlink))
    if can_symlink:
        # normal behavior
        assert islink(broken_flink)
        assert not exists(broken_flink)
    else:
        # on windows hard links are essentially the same file.
        # there is no trace that it was actually a link.
        assert exists(broken_flink)
def test_download_bad_url():
    """
    Check that we error when the url is bad

    Notes:
        For some reason this can take a long time to realize there is no
        server at this URL, even if the timeout is specified and fairly low.

    CommandLine:
        python tests/test_download.py test_download_bad_url --verbose
    """
    url = 'http://www.a-very-incorrect-url.gov/does_not_exist.txt'
    # if not ub.argflag('--network'):
    #     pytest.skip('not running network tests')

    # Ensure the opener exists
    import sys
    if sys.version_info[0] == 2:  # nocover
        # import urllib2 as urllib_x
        from urllib2 import URLError  # NOQA
    else:
        # import urllib.request as urllib_x
        from urllib.error import URLError  # NOQA
    # if urllib_x._opener is None:
    #     urllib_x.install_opener(urllib_x.build_opener())

    dpath = ub.ensure_app_cache_dir('ubelt', 'tests')
    fname = basename(url)
    fpath = join(dpath, fname)

    ub.delete(fpath)
    assert not exists(fpath)

    with pytest.raises(URLError):
        ub.download(url, fpath=fpath, verbose=1, timeout=1.0)
def test_cant_overwrite_file_with_symlink():
    if ub.WIN32:
        # Can't distinguish this case on windows
        pytest.skip()

    dpath = ub.ensure_app_cache_dir('ubelt',
                                    'test_cant_overwrite_file_with_symlink')
    ub.delete(dpath, verbose=2)
    ub.ensuredir(dpath, verbose=2)

    happy_fpath = join(dpath, 'happy_fpath.txt')
    happy_flink = join(dpath, 'happy_flink.txt')

    for verbose in [2, 1, 0]:
        print('=======')
        print('verbose = {!r}'.format(verbose))
        ub.delete(dpath, verbose=verbose)
        ub.ensuredir(dpath, verbose=verbose)
        ub.touch(happy_fpath, verbose=verbose)

        ub.touch(happy_flink)  # create a file where a link should be

        util_links._dirstats(dpath)
        with pytest.raises(FileExistsError):  # file exists error
            ub.symlink(happy_fpath, happy_flink, overwrite=False,
                       verbose=verbose)
        with pytest.raises(FileExistsError):  # file exists error
            ub.symlink(happy_fpath, happy_flink, overwrite=True,
                       verbose=verbose)
def clean():
    """
    __file__ = ub.truepath('~/code/clab/setup.py')
    """
    import ubelt as ub
    import os

    modname = 'clab'
    repodir = dirname(__file__)
    # pkgdir = join(repodir, modname)

    toremove = []
    for root, dnames, fnames in os.walk(repodir):

        if os.path.basename(root) == modname + '.egg-info':
            toremove.append(root)
            del dnames[:]

        if os.path.basename(root) == '__pycache__':
            toremove.append(root)
            del dnames[:]

        if os.path.basename(root) == '_ext':
            # Remove torch extensions
            toremove.append(root)
            del dnames[:]

        if os.path.basename(root) == 'build':
            # Remove python c extensions
            if len(dnames) == 1 and dnames[0].startswith('temp.'):
                toremove.append(root)
                del dnames[:]

        # Remove simple pyx inplace extensions
        for fname in fnames:
            if fname.endswith('.so') or fname.endswith('.c'):
                if fname.split('.')[0] + '.pyx' in fnames:
                    toremove.append(join(root, fname))

    def enqueue(d):
        if exists(d) and d not in toremove:
            toremove.append(d)

    enqueue(join(repodir, 'clab/models/yolo2/utils/cython_yolo.c'))
    enqueue(join(repodir, 'clab/models/yolo2/utils/cython_bbox.c'))
    enqueue(join(repodir, 'clab/models/yolo2/utils/nms/cpu_nms.c'))
    enqueue(join(repodir, 'clab/models/yolo2/utils/nms/cpu_nms.cpp'))
    enqueue(join(repodir, 'clab/models/yolo2/layers/roi_pooling/_ext'))
    enqueue(join(repodir, 'clab/models/yolo2/layers/reorg/_ext'))

    import glob
    for d in glob.glob(join(repodir, 'clab/models/yolo2/utils/nms/*_nms.*so')):
        enqueue(d)

    for dpath in toremove:
        # print('Removing dpath = {!r}'.format(dpath))
        ub.delete(dpath, verbose=1)
def main():
    try:
        script = __file__
    except NameError:
        raise
        # for IPython hacking
        script = ub.expandpath(
            '~/code/ndsampler/dev/devcheck_python23_isect_index_cache.py')

    # py2 = ub.find_exe('python2')
    # py3 = ub.find_exe('python3')
    # ub.cmd([py2, script, 'load_regions'], shell=True)
    # ub.cmd([py3, script, 'save_regions'], shell=True)

    # Register scripts for activating python 2/3 virtual envs that have
    # ndsampler installed
    import getpass
    username = getpass.getuser()
    if username in ['joncrall', 'jon.crall']:
        # Hack for Jon's computer
        activate_cmds = {
            'python2': 'we py2.7',
            'python3': 'we venv3.6',
        }
    else:
        assert False, 'need to customize activation scripts for your machine'
        # Template: customize these commands and remove the assert above
        activate_cmds = {
            'python2': 'source ~/venv27/bin/activate',
            'python3': 'conda activate py36',
        }

    def run(py):
        bash_cmd = ' && '.join([
            'source $HOME/.bashrc',
            activate_cmds[py],
            'python {} access_cache'.format(script),
        ])
        sh_cmd = 'bash -c "{}"'.format(bash_cmd)
        info = ub.cmd(sh_cmd, shell=True, verbose=3)
        return info

    workdir = ub.ensure_app_cache_dir('ndsampler', 'tests', '23_regions')

    # Save in python3, load in python2
    print('\n\n--- SAVE Python3, LOAD Python2 ---')
    ub.delete(workdir, verbose=1)
    info = run('python3')  # NOQA
    assert info['ret'] == 0
    info = run('python2')  # NOQA
    assert info['ret'] == 0

    print('\n\n--- SAVE Python2, LOAD Python3 ---')
    ub.delete(workdir, verbose=1)  # Clear the cache
    info = run('python2')  # NOQA
    assert info['ret'] == 0
    info = run('python3')  # NOQA
    assert info['ret'] == 0
def test_npy_backend():
    dset = ndsampler.CocoSampler.demo('shapes11', backend='npy')
    assert dset.frames._backend['type'] == 'npy'
    assert dset.frames.cache_dpath is not None
    ub.delete(dset.frames.cache_dpath)
    ub.ensuredir(dset.frames.cache_dpath)
    raw_img = dset.load_image(1)
    assert isinstance(raw_img, np.memmap)
def _startfresh(task):
    scene_im_dpaths, scene_gt_dpaths = task._all_scene_dpaths()
    keys = task._preprocessing_keys()
    for scene, key in it.product(task.scene_ids, keys):
        im_dpath = task.datasubdir('im' + key, scene)
        gt_dpath = task.datasubdir('gt' + key, scene)
        ub.delete(gt_dpath)
        ub.delete(im_dpath)
def test_grabdata_delete_hash_stamp():
    import ubelt as ub
    fname = 'foo3.bar'
    url = _demo_url(128 * 12)
    prefix1 = '43f92597d7eb08b57c88b636'
    fpath = ub.grabdata(url, fname=fname, hash_prefix=prefix1)
    stamp_fpath = fpath + '.sha512.hash'
    ub.delete(stamp_fpath)
    # With the stamp removed, grabdata must recompute the hash to verify
    fpath = ub.grabdata(url, fname=fname, hash_prefix=prefix1)
def _devcheck_manage_monitor(workdir, dry=True):
    all_sessions = collect_sessions(workdir)

    # Get all the images in the monitor directories
    # (this is a convention and not something netharn does by default)
    all_files = []

    # factor = 100
    max_keep = 300

    def _choose_action(file_infos):
        import kwarray
        file_infos = kwarray.shuffle(file_infos, rng=0)
        n_keep = max_keep
        # n_keep = (len(file_infos) // factor) + 1
        # n_keep = min(max_keep, n_keep)

        for info in file_infos[:n_keep]:
            info['action'] = 'keep'
        for info in file_infos[n_keep:]:
            info['action'] = 'delete'

    for session in ub.ProgIter(all_sessions, desc='checking monitor files'):
        dpaths = [
            join(session.dpath, 'monitor', 'train', 'batch'),
            join(session.dpath, 'monitor', 'vali', 'batch'),
            join(session.dpath, 'monitor', 'train'),
            join(session.dpath, 'monitor', 'vali'),
        ]
        exts = ['*.jpg', '*.png']
        for dpath in dpaths:
            for ext in exts:
                fpaths = list(glob.glob(join(dpath, ext)))
                file_infos = [{'size': os.stat(p).st_size, 'fpath': p}
                              for p in fpaths]
                _choose_action(file_infos)
                all_files.extend(file_infos)

    grouped_actions = ub.group_items(all_files, lambda x: x['action'])

    for key, group in grouped_actions.items():
        size = byte_str(sum([s['size'] for s in group]))
        print('{:>4} images: {:>4}, size={}'.format(
            key.capitalize(), len(group), size))

    if dry:
        print('Dry run')
    else:
        delete = grouped_actions.get('delete', [])
        delete_fpaths = [item['fpath'] for item in delete]
        for p in ub.ProgIter(delete_fpaths, desc='deleting'):
            ub.delete(p)
def clean_repo(repodir, modname, rel_paths=[]):
    """
    repodir = ub.truepath('~/code/netharn/')
    modname = 'netharn'
    rel_paths = [
        'netharn/util/nms/cpu_nms.c',
        'netharn/util/nms/cpu_nms.cpp',
        'netharn/util/nms/cython_boxes.c',
        'netharn/util/nms/cython_boxes.html',
    ]
    """
    toremove = []
    for root, dnames, fnames in os.walk(repodir):

        if os.path.basename(root) == modname + '.egg-info':
            toremove.append(root)
            del dnames[:]

        if os.path.basename(root) == '__pycache__':
            toremove.append(root)
            del dnames[:]

        if os.path.basename(root) == '_ext':
            # Remove torch extensions
            toremove.append(root)
            del dnames[:]

        if os.path.basename(root) == 'build':
            # Remove python c extensions
            if len(dnames) == 1 and dnames[0].startswith('temp.'):
                toremove.append(root)
                del dnames[:]

        # Remove simple pyx inplace extensions
        for fname in fnames:
            if fname.endswith('.so') or fname.endswith('.c'):
                if fname.split('.')[0] + '.pyx' in fnames:
                    toremove.append(join(root, fname))

    def enqueue(d):
        if exists(d) and d not in toremove:
            toremove.append(d)

    abs_paths = [
        join(repodir, p)
        for pat in rel_paths
        for p in glob.glob(pat, recursive=True)
    ]
    for abs_path in abs_paths:
        enqueue(abs_path)

    for dpath in toremove:
        # print('Removing dpath = {!r}'.format(dpath))
        ub.delete(dpath, verbose=1)
def make_part_scene(task, scene, scale=1):
    """
    Slices the full scene into smaller parts that fit into the network but
    are at the original resolution (or higher).

    >>> scene = '0001'
    >>> scale = 1
    """
    if task.part_overlap < 0 or task.part_overlap >= 1:
        raise ValueError(('part overlap was {}, but it must be '
                          'in the range [0, 1)').format(task.part_overlap))
    input_shape = task.input_shape
    overlap = task.part_overlap
    keepbound = task.part_keepbound

    scene_gtfull_dpath = task.datasubdir('gt' + 'full', scene)
    scene_imfull_dpath = task.datasubdir('im' + 'full', scene)

    gt_fpaths = sorted(glob.glob(join(scene_gtfull_dpath, '*.png')))
    im_fpaths = sorted(glob.glob(join(scene_imfull_dpath, '*.png')))

    # Define the output path for this preprocessing mode
    mode = 'part-scale{}'.format(scale)
    scene_gtout_dpath = task.datasubdir('gt' + mode, scene)
    scene_imout_dpath = task.datasubdir('im' + mode, scene)

    # Start fresh. Remove existing files
    ub.delete(scene_gtout_dpath, verbose=False)
    ub.delete(scene_imout_dpath, verbose=False)
    ub.ensuredir(scene_gtout_dpath)
    ub.ensuredir(scene_imout_dpath)

    for impath, gtpath in zip(im_fpaths, gt_fpaths):
        im = cv2.imread(impath, flags=cv2.IMREAD_UNCHANGED)
        gt = cv2.imread(gtpath, flags=cv2.IMREAD_UNCHANGED)

        if scale != 1.0:
            im = imutil.imscale(im, scale, cv2.INTER_LANCZOS4)[0]
            gt = imutil.imscale(gt, scale, cv2.INTER_NEAREST)[0]

        assert gt.max() <= task.labels.max(), (
            'be careful not to change gt labels')

        fname_we = splitext(basename(impath))[0]
        sl_gen = imutil.image_slices(im.shape[0:2], input_shape, overlap,
                                     keepbound)
        for idx, rc_slice in enumerate(sl_gen):
            # encode the slice in the image name?
            fname = '{}_part{:0=4d}.png'.format(fname_we, idx)
            im_part = im[rc_slice]
            gt_part = gt[rc_slice]

            cv2.imwrite(join(scene_imout_dpath, fname), im_part)
            cv2.imwrite(join(scene_gtout_dpath, fname), gt_part)

    return scene_imout_dpath, scene_gtout_dpath
def test_static_find_locals():
    """
    python ~/code/mkinit/tests/test_with_dummy.py test_static_find_locals
    """
    import mkinit
    cache_dpath = ub.ensure_app_cache_dir('mkinit/tests')
    paths = make_dummy_package(cache_dpath)
    ub.delete(paths['root_init'])
    modpath = paths['root']
    imports = list(mkinit.static_mkinit._find_local_submodules(modpath))
    print('imports = {!r}'.format(imports))
def ensuredir(dpath, mode=0o1777, verbose=0, recreate=False):
    r"""
    Ensures that directory will exist. Creates new dir with sticky bits by
    default

    Args:
        dpath (str | PathLike | Tuple[str | PathLike]): dir to ensure.
            Can also be a tuple to send to join
        mode (int, default=0o1777): octal mode of directory
        verbose (int, default=0): verbosity
        recreate (bool, default=False): if True removes the directory and
            all of its contents and creates a fresh new directory.
            USE CAREFULLY.

    Returns:
        str: path - the ensured directory

    SeeAlso:
        :func:`ubelt.Path.ensuredir`

    Note:
        This function is not thread-safe in Python2

    Example:
        >>> from ubelt.util_path import *  # NOQA
        >>> import ubelt as ub
        >>> cache_dpath = ub.ensure_app_cache_dir('ubelt')
        >>> dpath = join(cache_dpath, 'ensuredir')
        >>> if exists(dpath):
        ...     os.rmdir(dpath)
        >>> assert not exists(dpath)
        >>> ub.ensuredir(dpath)
        >>> assert exists(dpath)
        >>> os.rmdir(dpath)
    """
    if isinstance(dpath, (list, tuple)):
        dpath = join(*dpath)

    if recreate:
        import ubelt as ub
        ub.delete(dpath, verbose=verbose)

    if not exists(dpath):
        if verbose:
            print('Ensuring directory (creating {!r})'.format(dpath))
        if PY2:  # nocover
            os.makedirs(normpath(dpath), mode=mode)
        else:
            os.makedirs(normpath(dpath), mode=mode, exist_ok=True)
    else:
        if verbose:
            print('Ensuring directory (existing {!r})'.format(dpath))
    return dpath
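# A short usage sketch for the recreate flag documented above. The
# 'recreate_demo' directory name is illustrative; the sketch only uses
# ubelt functions that already appear elsewhere in this file.
def _demo_ensuredir_recreate():
    import ubelt as ub
    from os.path import exists, join
    dpath = ub.ensure_app_cache_dir('ubelt', 'recreate_demo')
    ub.touch(join(dpath, 'stale.txt'))
    # recreate=True deletes the directory and its contents, then remakes it
    ub.ensuredir(dpath, recreate=True)
    assert exists(dpath) and not exists(join(dpath, 'stale.txt'))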
def test_static_import_without_init():
    """
    python ~/code/mkinit/tests/test_with_dummy.py test_static_import_without_init
    """
    import mkinit
    cache_dpath = ub.ensure_app_cache_dir('mkinit/tests')
    paths = make_dummy_package(cache_dpath)
    ub.delete(paths['root_init'])
    modpath = paths['root']
    text = mkinit.static_init(modpath)
    check_dummy_root_init(text)
def make_augment_scene(task, mode, scene, rng=None):
    """
    Augments data in a scene of a specific "mode"

    mode = 'part-scale1'
    scene = '0000'
    rng = 'determ'
    gtdir = task.datasubdir('gtpart', scene)
    imdir = task.datasubdir('impart', scene)
    """
    assert task.enable_augment

    if rng == 'determ':
        # Make a deterministic seed based on the scene and mode
        seed = int(hashutil.hash_data([scene, mode], alphabet='hex'), 16)
        seed = seed % (2 ** 32 - 1)
        rng = np.random.RandomState(seed)

    auger = augment.SSegAugmentor(rng=rng, ignore_label=task.ignore_label)
    auger.params = task.aug_params

    # rng = np.random.RandomState(0)
    imdir = task.datasubdir('im' + mode, scene)
    gtdir = task.datasubdir('gt' + mode, scene)
    im_fpaths = sorted(glob.glob(join(imdir, '*.png')))
    gt_fpaths = sorted(glob.glob(join(gtdir, '*.png')))

    # Define the output path for the augmentation of this mode
    key = mode + '-aug'
    scene_imout_dpath = task.datasubdir('im' + key, scene)
    scene_gtout_dpath = task.datasubdir('gt' + key, scene)

    # Start fresh. Remove existing files
    ub.delete(scene_gtout_dpath, verbose=False)
    ub.delete(scene_imout_dpath, verbose=False)
    ub.ensuredir(scene_gtout_dpath)
    ub.ensuredir(scene_imout_dpath)

    for impath, gtpath in ub.ProgIter(
            list(zip(im_fpaths, gt_fpaths)),
            label=' * augment mode={}'.format(mode)):
        fname_we = splitext(basename(impath))[0]
        im = cv2.imread(impath, flags=cv2.IMREAD_UNCHANGED)
        gt = cv2.imread(gtpath, flags=cv2.IMREAD_UNCHANGED)

        aug_gen = auger.augment(im, gt)
        for augx, aug_data in enumerate(aug_gen):
            (im_aug, gt_aug) = aug_data[0:2]
            fname = '{}_aug{:0=4d}.png'.format(fname_we, augx)

            cv2.imwrite(join(scene_imout_dpath, fname), im_aug)
            cv2.imwrite(join(scene_gtout_dpath, fname), gt_aug)

    return scene_imout_dpath, scene_gtout_dpath
def make_simple_dummy_package():
    """
    Creates a dummy package structure with or without __init__ files

    ANY EXISTING FILES ARE DELETED
    """
    # Fresh start
    dpath = ub.ensure_app_cache_dir("mkinit/test/simple_demo/")
    ub.delete(dpath)
    ub.ensuredir(dpath)
    rel_paths = {
        "root": "mkinit_demo_pkg",
        "root_init": "mkinit_demo_pkg/__init__.py",
        "submod": "mkinit_demo_pkg/submod.py",
        "subpkg": "mkinit_demo_pkg/subpkg",
        "subpkg_init": "mkinit_demo_pkg/subpkg/__init__.py",
        "nested": "mkinit_demo_pkg/subpkg/nested.py",
    }
    paths = {key: join(dpath, path) for key, path in rel_paths.items()}
    for key, path in paths.items():
        if not path.endswith(".py"):
            ub.ensuredir(path)
    for key, path in paths.items():
        if path.endswith(".py"):
            ub.touch(path)

    with open(paths["submod"], "w") as file:
        file.write(
            ub.codeblock(
                '''
                print('SUBMOD SIDE EFFECT')

                def submod_func():
                    print('This is a submod func in {}'.format(__file__))
                '''
            )
        )

    with open(paths["nested"], "w") as file:
        file.write(
            ub.codeblock(
                '''
                print('NESTED SIDE EFFECT')

                def nested_func():
                    print('This is a nested func in {}'.format(__file__))
                '''
            )
        )
    return paths
def _cleanup(self):
    # print('self._cleanup = {!r}'.format(self._cleanup))
    if self._handle is not None:
        if not getattr(self, 'closed', True):
            closemethod = getattr(self, 'close', None)
            if closemethod is not None:  # nocover
                closemethod()
            closemethod = None
        self._handle = None
    if self._temp_dpath and exists(self._temp_dpath):
        # os.unlink(self._temp_dpath)
        import ubelt as ub
        ub.delete(self._temp_dpath)
def test_cog_backend():
    try:
        import gdal  # NOQA
    except ImportError:
        import pytest
        pytest.skip('cog requires gdal')
    dset = ndsampler.CocoSampler.demo('shapes13', backend='cog')
    assert dset.frames._backend['type'] == 'cog'
    assert dset.frames.cache_dpath is not None
    ub.delete(dset.frames.cache_dpath)
    ub.ensuredir(dset.frames.cache_dpath)
    raw_img = dset.load_image(5)
    assert not isinstance(raw_img, np.ndarray)
def test_rel_file_link():
    dpath = ub.ensure_app_cache_dir('ubelt', 'test_rel_file_link')
    ub.delete(dpath, verbose=2)
    ub.ensuredir(dpath, verbose=2)

    real_fpath = join(ub.ensuredir((dpath, 'dir1')), 'real')
    link_fpath = join(ub.ensuredir((dpath, 'dir2')), 'link')
    ub.touch(real_fpath)

    orig = os.getcwd()
    try:
        os.chdir(dpath)
        real_path = relpath(real_fpath, dpath)
        link_path = relpath(link_fpath, dpath)
        link = ub.symlink(real_path, link_path)
        import sys
        if sys.platform.startswith('win32') and isfile(link):
            # Note: if windows hard links the file there is no way we can
            # tell that it was a symlink. Just verify it exists.
            from ubelt import _win32_links
            assert _win32_links._win32_is_hardlinked(real_fpath, link_fpath)
        else:
            pointed = ub.util_links._readlink(link)
            resolved = os.path.realpath(
                ub.expandpath(join(dirname(link), pointed)))
            assert os.path.realpath(ub.expandpath(real_fpath)) == resolved
    except Exception:
        util_links._dirstats(dpath)
        util_links._dirstats(join(dpath, 'dir1'))
        util_links._dirstats(join(dpath, 'dir2'))
        print('TEST FAILED: test_rel_link')
        print('real_fpath = {!r}'.format(real_fpath))
        print('link_fpath = {!r}'.format(link_fpath))
        print('real_path = {!r}'.format(real_path))
        print('link_path = {!r}'.format(link_path))
        try:
            if 'link' in vars():
                print('link = {!r}'.format(link))
            if 'pointed' in vars():
                print('pointed = {!r}'.format(pointed))
            if 'resolved' in vars():
                print('resolved = {!r}'.format(resolved))
        except Exception:
            print('...rest of the names are not available')
        raise
    finally:
        util_links._dirstats(dpath)
        util_links._dirstats(join(dpath, 'dir1'))
        util_links._dirstats(join(dpath, 'dir2'))
        os.chdir(orig)
def test_download_no_fpath():
    url = 'http://i.imgur.com/rqwaDag.png'
    dpath = ub.ensure_app_cache_dir('ubelt')
    fname = basename(url)
    fpath = join(dpath, fname)
    ub.delete(fpath)
    assert not exists(fpath)
    got_fpath = ub.download(url)
    assert got_fpath == fpath
    assert exists(fpath)
def test_download_bad_url():
    """
    Check that we error when the url is bad
    """
    url = 'http://averyincorrecturl'
    dpath = ub.ensure_app_cache_dir('ubelt', 'tests')
    fname = basename(url)
    fpath = join(dpath, fname)
    ub.delete(fpath)
    assert not exists(fpath)
    with pytest.raises(Exception):
        ub.download(url, fpath=fpath)
def test_download_chunksize():
    # url = 'https://www.dropbox.com/s/jl506apezj42zjz/ibeis-win32-setup-ymd_hm-2015-08-01_16-28.exe?dl=1'
    url = 'http://i.imgur.com/rqwaDag.png'
    dpath = ub.ensure_app_cache_dir('ubelt')
    fname = basename(url)
    fpath = join(dpath, fname)
    ub.delete(fpath)
    assert not exists(fpath)
    got_fpath = ub.download(url, chunksize=2)
    assert got_fpath == fpath
    assert exists(fpath)
def clean_tensorboard_protobufs(dpath):
    """
    Removes event logs that only contain conflicting information

    dpath = '/home/local/KHQ/jon.crall/data/work_phase2/train/unet2/'

    CommandLine:
        python -m clab.util.misc clean_tensorboard_protobufs

    Example:
        >>> dpath = '.'
        >>> clean_tensorboard_protobufs(dpath)
    """
    # from tensorflow.python.summary import event_accumulator
    from tensorboard.backend.event_processing import event_accumulator
    import glob
    from os.path import join
    from collections import defaultdict
    import ubelt as ub
    import tqdm

    # Clean out iterno overrides
    event_paths = sorted(glob.glob(join(dpath, 'events.out.tfevents*')))

    bad_paths = set()
    good_paths = set()
    low_steps = defaultdict(lambda: float('inf'))
    for p in tqdm.tqdm(list(reversed(event_paths)), desc='cleaning'):
        ea = event_accumulator.EventAccumulator(p)
        ea.Reload()
        for key in ea.scalars.Keys():
            events = ea.scalars.Items(key)
            for e in reversed(sorted(events, key=lambda e: e.wall_time)):
                if e.step < low_steps[key]:
                    low_steps[key] = e.step
                    good_paths.add(p)
                else:
                    # Can we individually remove scalars?
                    bad_paths.add(p)
                    # print('skip e = {}, {!r}'.format(key, e))

    # these paths only have conflicting old information. Just kill them
    onlybad_paths = bad_paths - good_paths
    print('onlybad_paths = {!r}'.format(onlybad_paths))
    for p in onlybad_paths:
        ub.delete(p)
def test_download_no_fpath():
    # url = 'http://i.imgur.com/rqwaDag.png'
    # if not ub.argflag('--network'):
    #     pytest.skip('not running network tests')
    url = _demo_url()
    dpath = ub.ensure_app_cache_dir('ubelt')
    fname = basename(url)
    fpath = join(dpath, fname)
    ub.delete(fpath)
    assert not exists(fpath)
    got_fpath = ub.download(url)
    assert got_fpath == fpath
    assert exists(fpath)
def test_download_with_fpath():
    url = 'http://i.imgur.com/rqwaDag.png'
    dpath = ub.ensure_app_cache_dir('ubelt', 'tests')
    fname = basename(url)
    fpath = join(dpath, fname)
    ub.delete(fpath)
    assert not exists(fpath)
    got_fpath = ub.download(url, fpath=fpath)
    assert got_fpath == fpath
    assert exists(fpath)

    with open(got_fpath, 'rb') as file:
        data = file.read()
    assert len(data) > 1200, 'should have downloaded some bytes'
def cleanup_snapshots(harn):
    """
    remove old snapshots

    TODO:
        [ ] - keep the top epochs for every metric
    """
    snapshots = harn.prev_snapshots()
    existing_epochs = sorted([
        int(parse.parse('{}_epoch_{num:d}.pt', path).named['num'])
        for path in snapshots
    ])

    num_keep = harn.config['num_keep']

    def _epochs_to_remove(existing_epochs, num_keep):
        """
        doctest:
            >>> import netharn as nh
            >>> harn = FitHarn({})
            >>> rng = np.random.RandomState(0)
            >>> harn.monitor = nh.Monitor(minimize=['loss'], maximize=['miou'])
            >>> for epoch in range(200):
            >>>     harn.monitor.update(epoch, {'loss': rng.rand(),
            >>>                                 'miou': rng.rand()})
            >>> existing_epochs = list(range(0, 200, 4))
            >>> num_keep = 10
            >>> to_remove = _epochs_to_remove(existing_epochs, num_keep)
        """
        keep = set()

        # always keep the most recent snapshots
        recent = existing_epochs[-num_keep:]
        keep.update(recent)

        if harn.monitor:
            for best_epochs in harn.monitor.best_epochs().values():
                best = ub.oset(best_epochs).intersection(existing_epochs)
                keep.update(best[:num_keep])

        to_remove = set(existing_epochs) - keep
        return to_remove

    epoch_to_fpath = dict(zip(existing_epochs, snapshots))
    to_remove = _epochs_to_remove(existing_epochs, num_keep)
    for fpath in ub.take(epoch_to_fpath, to_remove):
        ub.delete(fpath)
def demo(config=None):
    """
    Runs the algorithm end-to-end.
    """
    # dataset = 'test'
    # dataset = 'haul83'

    if config is None:
        import argparse
        parser = argparse.ArgumentParser(description='Standalone camtrawl demo')

        parser.add_argument('--cal', default='cal.npz',
                            help='path to matlab or numpy stereo calibration file')
        parser.add_argument('--left', default='left',
                            help='path to directory containing left images')
        parser.add_argument('--right', default='right',
                            help='path to directory containing right images')
        parser.add_argument('--out', default='./out',
                            help='output directory')
        parser.add_argument('-f', '--overwrite', action='store_true',
                            help='will delete any existing output')
        parser.add_argument('--draw', action='store_true',
                            help='draw visualization of algorithm steps')
        parser.add_argument('--dataset', default=None,
                            help='Developer convenience. Assumes you have demo '
                                 'data downloaded and available, so you do not '
                                 'need to specify the other args.')

        args = parser.parse_args()
        config = args.__dict__.copy()
        config = FrozenKeyDict(config)

    if config['dataset'] is not None:
        img_path1, img_path2, cal_fpath = demodata_input(dataset=config['dataset'])
        config['left'] = img_path1
        config['right'] = img_path2
        config['cal'] = cal_fpath

    img_path1, img_path2, cal_fpath = ub.take(config, [
        'left', 'right', 'cal'])
    out_dpath = config['out']
    logging.info('Demo Config = {!r}'.format(config))

    ub.ensuredir(out_dpath)

    # ----
    # Choose parameter configurations
    # ----

    # Use GMM based model
    gmm_params = {
    }
    triangulate_params = {
    }

    DRAWING = config['draw']

    # ----
    # Initialize algorithms
    # ----

    detector1 = ctalgo.GMMForegroundObjectDetector(**gmm_params)
    detector2 = ctalgo.GMMForegroundObjectDetector(**gmm_params)
    triangulator = ctalgo.FishStereoMeasurments(**triangulate_params)

    try:
        import pyfiglet
        print(pyfiglet.figlet_format('CAMTRAWL', font='cybermedium'))
    except ImportError:
        logging.debug('pyfiglet is not installed')
        print('========')
        print('CAMTRAWL')
        print('========')

    logging.info('Detector1 Config: ' + ub.repr2(detector1.config, nl=1))
    logging.info('Detector2 Config: ' + ub.repr2(detector2.config, nl=1))
    logging.info('Triangulate Config: ' + ub.repr2(triangulator.config, nl=1))
    logging.info('DRAWING = {!r}'.format(DRAWING))

    cal = ctalgo.StereoCalibration.from_file(cal_fpath)

    stream = StereoFrameStream(img_path1, img_path2)
    stream.preload()

    # HACK IN A BEGIN FRAME
    if len(stream) > 2200:
        stream.seek(2200)

    # ----
    # Run the algorithm
    # ----

    # n_frames = 2000
    # stream.aligned_frameids = stream.aligned_frameids[:stream.index]

    measure_fpath = join(out_dpath, 'measurements.csv')
    if exists(measure_fpath):
        if config['overwrite']:
            ub.delete(measure_fpath)
        else:
            raise IOError('Measurement path already exists')
    output_file = open(measure_fpath, 'a')

    if DRAWING:
        drawing_dpath = join(out_dpath, 'visual')
        if exists(drawing_dpath):
            if config['overwrite']:
                ub.delete(drawing_dpath)
            else:
                raise IOError('Output path already exists')
        ub.ensuredir(drawing_dpath)

    headers = ['current_frame', 'fishlen', 'range', 'error', 'dz',
               'box_pts1', 'box_pts2']
    output_file.write(','.join(headers) + '\n')
    output_file.flush()

    measurements = []

    logger.info('begin camtrawl iteration')

    import tqdm
    # prog = ub.ProgIter(iter(stream), total=len(stream), desc='camtrawl demo',
    #                    clearline=False, freq=1, adjust=False)
    prog = tqdm.tqdm(iter(stream), total=len(stream), desc='camtrawl demo',
                     leave=True)

    def csv_repr(d):
        if isinstance(d, np.ndarray):
            d = d.tolist()
        s = repr(d)
        return s.replace('\n', '').replace(',', ';').replace(' ', '')

    for frame_num, (frame_id, img1, img2) in enumerate(prog):
        logger.debug('frame_num = {!r}'.format(frame_num))

        detections1 = list(detector1.detect(img1))
        detections2 = list(detector2.detect(img2))
        masks1 = detector1._masks
        masks2 = detector2._masks

        any_detected = len(detections1) > 0 or len(detections2) > 0

        if any_detected:
            assignment, assign_data, cand_errors = triangulator.find_matches(
                cal, detections1, detections2)
            # Append assignments to the measurements
            for data in assign_data:
                data['current_frame'] = int(frame_id)
                measurements.append(data)
                line = ','.join([csv_repr(d) for d in ub.take(data, headers)])
                output_file.write(line + '\n')
                output_file.flush()
        else:
            cand_errors = None
            assignment, assign_data = None, None

        if DRAWING >= 2 or (DRAWING and any_detected):
            DRAWING = 3
            stacked = DrawHelper.draw_stereo_detections(
                img1, detections1, masks1,
                img2, detections2, masks2,
                assignment, assign_data, cand_errors)
            if cv2.__version__.startswith('2'):
                cv2.putText(stacked,
                            text='frame #{}, id={}'.format(frame_num, frame_id),
                            org=(10, 50),
                            fontFace=cv2.FONT_HERSHEY_SIMPLEX,
                            fontScale=1, color=(255, 0, 0),
                            thickness=2, lineType=cv2.cv.CV_AA)
            else:
                stacked = cv2.putText(
                    stacked,
                    text='frame #{}, id={}'.format(frame_num, frame_id),
                    org=(10, 50),
                    fontFace=cv2.FONT_HERSHEY_SIMPLEX,
                    fontScale=1, color=(255, 0, 0),
                    thickness=2, lineType=cv2.LINE_AA)
            cv2.imwrite(drawing_dpath + '/mask{}_draw.png'.format(frame_id),
                        stacked)

    output_file.close()

    n_total = len(measurements)
    logger.info('n_total = {!r}'.format(n_total))
    if n_total:
        all_errors = np.array([d['error'] for d in measurements])
        all_lengths = np.array([d['fishlen'] for d in measurements])
        logger.info('ave_error = {:.2f} +- {:.2f}'.format(
            all_errors.mean(), all_errors.std()))
        logger.info('ave_lengths = {:.2f} +- {:.2f} '.format(
            all_lengths.mean(), all_lengths.std()))
    return measurements
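# Example invocation of the demo above. The script filename is hypothetical;
# the flags all come from the argparse definition inside demo():
#
#   python camtrawl_demo.py --cal cal.npz --left left --right right \
#       --out ./out --draw -f
#
# Passing -f/--overwrite lets the run delete measurements.csv and the
# visual/ directory from a previous run instead of raising IOError.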