def initial_weights():
    """
    Weights pretrained on ImageNet by yolo9000-pytorch.

    Downloads the darknet19 npz weights into the app cache (if missing) and
    converts them into a torch state-dict file.

    Returns:
        str: path to the converted ``.pt`` torch weights file
    """
    import os
    url = 'https://data.kitware.com/api/v1/file/5ab513438d777f068578f1d0/download'
    dpath = ub.ensure_app_cache_dir('clab/yolo_v2')
    fname = 'darknet19.weights.npz'
    dest = os.path.join(dpath, fname)
    if not os.path.exists(dest):
        # NOTE(review): downloads via a shell curl call; assumes curl exists
        command = 'curl -X GET {} > {}'.format(url, dest)
        ub.cmd(command, verbout=1, shell=True)
    # url = 'http://acidalia.kitware.com:8000/weights/darknet19.weights.npz'
    # npz_fpath = ub.grabdata(url, dpath=ub.ensure_app_cache_dir('clab'))

    # convert to torch weights
    npz_fpath = dest
    torch_fpath = ub.augpath(npz_fpath, ext='.pt')
    if not os.path.exists(torch_fpath):
        # hack to transform initial state
        model = Darknet19(num_classes=20)
        model.load_from_npz(npz_fpath, num_conv=18)
        torch.save(model.state_dict(), torch_fpath)
    # from clab import xpu_device
    # xpu = xpu_device.XPU('gpu')
    # xpu.load(torch_fpath)
    # torch.load(torch_fpath)
    return torch_fpath
def test_cmd_tee_badmethod():
    """
    pytest tests/test_cmd.py::test_cmd_tee_badmethod
    """
    # An unrecognized tee backend must be rejected with a ValueError
    cmd_text = 'python -c "for i in range(100): print(str(i))"'
    with pytest.raises(ValueError):
        ub.cmd(cmd_text, verbose=2, tee_backend='bad tee backend')
def editfile(fpath, verbose=True):  # nocover
    """
    DEPRECATED: This has been ported to xdev, please use that version.

    Opens a file or code corresponding to a live python object in your
    preferred visual editor. This function is mainly useful in an interactive
    IPython session.

    The visual editor is determined by the `VISUAL` environment variable.  If
    this is not specified it defaults to gvim.

    Args:
        fpath (PathLike): a file path or python module / function
        verbose (int): verbosity

    Example:
        >>> # xdoctest: +SKIP
        >>> # This test interacts with a GUI frontend, not sure how to test.
        >>> import ubelt as ub
        >>> ub.editfile(ub.util_platform.__file__)
        >>> ub.editfile(ub)
        >>> ub.editfile(ub.editfile)
    """
    # BUGFIX: ``types`` is a stdlib module; ``from six import types`` raises
    # ImportError because six does not expose a ``types`` attribute.
    import types
    import ubelt as ub
    import warnings
    schedule_deprecation(**DEP_SCHEDULE_1)
    warnings.warn('Please use xdev.editfile instead', DeprecationWarning)
    if not isinstance(fpath, string_types):
        # resolve a live module / function to the file that defines it
        if isinstance(fpath, types.ModuleType):
            fpath = fpath.__file__
        else:
            fpath = sys.modules[fpath.__module__].__file__
        # prefer the source file over compiled bytecode
        fpath_py = fpath.replace('.pyc', '.py')
        if exists(fpath_py):
            fpath = fpath_py
    if verbose:
        print('[ubelt] editfile("{}")'.format(fpath))
    editor = os.environ.get('VISUAL', 'gvim')
    if not ub.find_exe(editor):
        warnings.warn('Cannot find visual editor={}'.format(editor), UserWarning)
        # Try and fallback on commonly installed editor
        # BUGFIX: a comma was missing between 'TextEdit' and 'Notepad', which
        # silently concatenated them into the bogus name 'TextEditNotepad'.
        alt_candidates = [
            'gedit',
            'TextEdit',
            'Notepad',
            # 'todo vscode',
            # 'todo atom',
            # 'todo sublime',
        ]
        for cand in alt_candidates:
            if ub.find_exe(cand):
                editor = cand
                break  # BUGFIX: prefer the first available candidate
    if not exists(fpath):
        raise IOError('Cannot start nonexistent file: %r' % fpath)
    ub.cmd([editor, fpath], fpath, detach=True)
def _autojit_cython(pyx_fpath, verbose=1):
    """
    This idea is that given a pyx file, we try to compile it. We write a stamp
    file so subsequent calls should be very fast as long as the source pyx has
    not changed.

    Parameters
    ----------
    pyx_fpath : str
        path to the pyx file

    verbose : int
        higher is more verbose.

    Returns
    -------
    True when a compiled library exists (or was just built), False when
    ubelt is unavailable; implicitly None when cythonize is not on the PATH.
    """
    import shutil

    # TODO: move necessary ubelt utilities to nx.utils?
    # Separate this into its own util?
    if shutil.which("cythonize"):
        pyx_dpath = dirname(pyx_fpath)

        # Check if the compiled library exists
        pyx_base = splitext(basename(pyx_fpath))[0]

        SO_EXTS = _platform_pylib_exts()
        # so_fname stays False when no compiled artifact was found
        so_fname = False
        for fname in os.listdir(pyx_dpath):
            if fname.startswith(pyx_base) and fname.endswith(SO_EXTS):
                so_fname = fname
                break

        try:
            # Currently this functionality depends on ubelt.
            # We could replace ub.cmd with subprocess.check_call and ub.augpath
            # with os.path operations, but hash_file and CacheStamp are harder
            # to replace. We can use "liberator" to statically extract these
            # and add them to nx.utils though.
            import ubelt as ub
        except Exception:
            return False
        else:
            if so_fname is False:
                # We can compute what the so_fname will be if it doesnt exist
                so_fname = pyx_base + SO_EXTS[0]

            so_fpath = join(pyx_dpath, so_fname)
            # the stamp invalidates when the pyx source content changes
            depends = [ub.hash_file(pyx_fpath, hasher="sha1")]
            stamp_fname = ub.augpath(so_fname, ext=".jit.stamp")
            stamp = ub.CacheStamp(
                stamp_fname,
                dpath=pyx_dpath,
                product=so_fpath,
                depends=depends,
                verbose=verbose,
            )
            if stamp.expired():
                ub.cmd("cythonize -i {}".format(pyx_fpath), verbose=verbose, check=True)
                stamp.renew()
            return True
def test_cmd_tee_select():
    """
    The 'select' tee backend must capture stdout identically whether or not
    output is also echoed to the terminal.
    """
    expected = ''.join(str(i) + '\n' for i in range(100))
    cmd_text = 'python -c "for i in range(100): print(str(i))"'
    for verbosity in (1, 0):
        result = ub.cmd(cmd_text, verbose=verbosity, tee_backend='select')
        assert result['out'] == expected
def benchmark_multi_or_combined_import():
    """
    Combining all imports into a single line is slightly faster

    Times ``python -c`` subprocess invocations of one combined
    ``from os.path import a, b, ...`` statement versus many ';'-joined
    single-name import statements.
    """
    import ubelt as ub
    attr_names = [
        'altsep', 'basename', 'commonpath', 'commonprefix', 'curdir',
        'defpath', 'devnull', 'dirname', 'exists', 'expanduser', 'expandvars',
        'extsep', 'genericpath', 'getatime', 'getctime', 'getmtime',
        'getsize', 'isabs', 'isdir', 'isfile', 'islink', 'ismount', 'join',
        'lexists', 'normcase', 'normpath', 'os', 'pardir', 'pathsep',
        'realpath', 'relpath', 'samefile',
    ]
    combined_lines = 'from os.path import ' + ', '.join(attr_names)
    multi_lines = '; '.join(
        ['from os.path import ' + name for name in attr_names])
    import timerit
    ti = timerit.Timerit(10, bestof=3, verbose=2)
    for timer in ti.reset('combined_lines'):
        with timer:
            ub.cmd('python -c "{}"'.format(combined_lines), check=True)
    for timer in ti.reset('multi_lines'):
        with timer:
            # CONSISTENCY: also check the return code here so a failing
            # subprocess cannot silently skew this benchmark (the unused
            # ``info`` binding was dropped).
            ub.cmd('python -c "{}"'.format(multi_lines), check=True)
def demo_weights():
    """
    Weights trained on VOC by yolo9000-pytorch
    """
    import os
    url = 'https://data.kitware.com/api/v1/item/5ab13b0e8d777f068578e251/download'
    dpath = ub.ensure_app_cache_dir('clab/yolo_v2')
    fname = 'yolo-voc.weights.pt'
    dest = os.path.join(dpath, fname)
    # only download when the cached copy is missing
    if not os.path.exists(dest):
        ub.cmd('curl -X GET {} > {}'.format(url, dest), verbout=1, shell=True)
    return dest
def ensure_nongit_plugins():
    """
    Download and install vim plugins that are not distributed via git.
    """
    try:
        import ubelt as ub
        import REPOS1
        BUNDLE_DPATH = REPOS1.BUNDLE_DPATH
        for url in REPOS1.VIM_NONGIT_PLUGINS:
            fpath = grab_zipped_url(url, download_dir=BUNDLE_DPATH)
            if fpath.endswith('.vba'):
                # vimball archives install themselves when sourced
                cmd_ = 'vim ' + fpath + ' -c "so % | q"'
                ub.cmd(cmd_, verbose=3)
            print('url = %r' % (url,))
    except ImportError:
        # BUGFIX: the old message blamed "utool", but the imports that can
        # actually fail here are ubelt and REPOS1.
        print('Cant do nongit plugins without ubelt and REPOS1')
def test_version_agreement():
    """
    Ensure that line_profiler and kernprof have the same version info
    """
    import ubelt as ub
    lp_info = ub.cmd('python -m line_profiler --version')
    kp_info = ub.cmd('python -m kernprof --version')
    # Strip local version suffixes
    lp_version = lp_info['out'].strip().split('+')[0]
    kp_version = kp_info['out'].strip().split('+')[0]
    assert kp_version == lp_version, 'kernprof and line_profiler must be in sync'
def check_relationships(branches):
    """
    Determine and visualize the ancestor/descendant relationships between a
    set of git branches.

    Args:
        branches (List[str]): branch names to compare pairwise

    Side effects:
        Prints the ancestor and descendant mappings and shows a
        transitively-reduced relationship graph via plottool/networkx.
    """
    # For every unordered pair, ask git which branch is an ancestor of which
    ancestors = {b: set() for b in branches}
    # NOTE(review): combinations yields n*(n-1)/2 pairs, so this ProgIter
    # length overcounts by a factor of 2 — display only, not a logic error
    length = len(branches) * (len(branches) - 1)
    for b1, b2 in ub.ProgIter(it.combinations(branches, 2), length=length):
        # exit code 0 means the first ref is an ancestor of the second
        ret = ub.cmd('git merge-base --is-ancestor {} {}'.format(b1, b2))['ret']
        if ret == 0:
            ancestors[b1].add(b2)
        ret = ub.cmd('git merge-base --is-ancestor {} {}'.format(b2, b1))['ret']
        if ret == 0:
            ancestors[b2].add(b1)

    print('<key> is an ancestor of <value>')
    print(ub.repr2(ancestors))

    # Invert the ancestor mapping
    descendants = {b: set() for b in branches}
    for key, others in ancestors.items():
        for o in others:
            descendants[o].add(key)
    print('<key> descends from <value>')
    print(ub.repr2(descendants))

    import plottool as pt
    import networkx as nx
    # Build a DAG with edges pointing from ancestor to descendant
    G = nx.DiGraph()
    G.add_nodes_from(branches)
    for key, others in ancestors.items():
        for o in others:
            # G.add_edge(key, o)
            G.add_edge(o, key)

    from networkx.algorithms.connectivity.edge_augmentation import collapse
    flag = True
    G2 = G
    # Repeatedly collapse 2-cycles (branches that are mutual ancestors, i.e.
    # point at the same commit) into single nodes labeled with both names,
    # restarting the edge scan after every collapse.
    while flag:
        flag = False
        for u, v in list(G2.edges()):
            if G2.has_edge(v, u):
                G2 = collapse(G2, [[u, v]])
                node_relabel = ub.ddict(list)
                for old, new in G2.graph['mapping'].items():
                    node_relabel[new].append(old)
                G2 = nx.relabel_nodes(G2, {k: '\n'.join(v) for k, v in node_relabel.items()})
                flag = True
                break

    # Transitive reduction keeps only the immediate relationships
    G3 = nx.transitive_reduction(G2)
    pt.show_nx(G3, arrow_width=1.5, prog='dot', layoutkw=dict(prog='dot'))
    pt.zoom_factory()
    pt.pan_factory()
    pt.plt.show()
def editfile(fpath, verbose=True):
    """
    Opens a file or code corresponding to a live python object in your
    preferred visual editor. This function is mainly useful in an interactive
    IPython session.

    The visual editor is determined by the `VISUAL` environment variable.  If
    this is not specified it defaults to gvim.

    Args:
        fpath (PathLike): a file path or python module / function
        verbose (int): verbosity

    DisableExample:
        >>> # This test interacts with a GUI frontend, not sure how to test.
        >>> import xdev
        >>> ub.editfile(xdev.misc.__file__)
        >>> ub.editfile(xdev)
        >>> ub.editfile(xdev.editfile)
    """
    if not isinstance(fpath, six.string_types):
        # resolve a live module / function to the file that defines it
        if isinstance(fpath, types.ModuleType):
            fpath = fpath.__file__
        else:
            fpath = sys.modules[fpath.__module__].__file__
        # prefer the source file over compiled bytecode
        fpath_py = fpath.replace('.pyc', '.py')
        if exists(fpath_py):
            fpath = fpath_py
    if verbose:
        print('[xdev] editfile("{}")'.format(fpath))
    editor = os.environ.get('VISUAL', 'gvim')
    if not ub.find_exe(editor):
        import warnings
        warnings.warn('Cannot find visual editor={}'.format(editor), UserWarning)
        # Try and fallback on commonly installed editor
        # BUGFIX: a comma was missing between 'TextEdit' and 'Notepad', which
        # silently concatenated them into the bogus name 'TextEditNotepad'.
        alt_candidates = [
            'gedit',
            'TextEdit',
            'Notepad',
        ]
        for cand in alt_candidates:
            if ub.find_exe(cand):
                editor = cand
                break  # BUGFIX: prefer the first available candidate
    if not exists(fpath):
        raise IOError('Cannot start nonexistant file: %r' % fpath)
    ub.cmd([editor, fpath], fpath, detach=True)
def benchmark_import_time():
    """
    Report per-module import times for a set of representative imports.
    """
    import ubelt as ub

    def _time_import(statement, show_full=False):
        # -X importtime writes its per-module timing report to stderr
        info = ub.cmd('python -X importtime -c "{}"'.format(statement))
        if show_full:
            print(info['err'])
        # the final line holds the cumulative time for the top-level import
        print(info['err'].rstrip().split('\n')[-1])

    # show the full breakdown for ubelt itself, just the total for the rest
    _time_import('import ubelt', show_full=True)
    for statement in [
            'from concurrent import futures',
            'import numpy',
            'import hashlib',
            'import typing',
            'import json',
            'import uuid',
            'import xxhash',
    ]:
        _time_import(statement)
def clone(repo, recursive=False):
    """
    Clone the repo to its configured directory if it is not already there.
    """
    print('[git] check repo exists at %s' % (repo.dpath))
    args = '--recursive' if recursive else ''
    if not exists(repo.dpath):
        import ubelt as ub
        # clone from within the parent directory
        os.chdir(dirname(repo.dpath))
        print('repo.default_branch = %r' % (repo.default_branch,))
        if repo.default_branch is not None:
            # check out a specific branch on clone
            args += ' -b {}'.format(repo.default_branch)
        ub.cmd('git clone {args} {url}'.format(args=args, url=repo.url),
               verbose=2)
def issue(repo, command, sudo=False, dry=False, error='raise', return_out=False):
    """
    issues a command on a repo

    Args:
        repo: repo object providing dpath, short_status, and chdir_context
        command (str): shell command to run ('short_status' is a special
            pseudo-command dispatched to the repo object)
        sudo (bool): run with sudo (asserts False on windows)
        dry (bool): if True only print the commands
        error (str): 'raise' to raise on failure, 'return' to return output
        return_out (bool): if True return captured stdout

    Example:
        >>> # DISABLE_DOCTEST
        >>> repo = dirname(dirname(ub.__file__))
        >>> command = 'git status'
        >>> sudo = False
        >>> result = repocmd(repo, command, sudo)
        >>> print(result)
    """
    WIN32 = sys.platform.startswith('win32')
    if WIN32:
        assert not sudo, 'cant sudo on windows'
    # special-cased pseudo-command handled by the repo object itself
    if command == 'short_status':
        return repo.short_status()
    command_list = [command]
    cmdstr = '\n '.join([cmd_ for cmd_ in command_list])
    if not dry:
        import ubelt as ub
        print('+--- *** repocmd(%s) *** ' % (cmdstr,))
        print('repo=%s' % ub.color_text(repo.dpath, 'yellow'))
    verbose = True
    # run every command from inside the repo directory
    with repo.chdir_context():
        ret = None
        for count, command in enumerate(command_list):
            if dry:
                print(command)
                continue
            if not sudo or WIN32:
                cmdinfo = ub.cmd(command, verbose=1)
                out, err, ret = ub.take(cmdinfo, ['out', 'err', 'ret'])
            else:
                # NOTE(review): the sudo branch does not pass verbose=1,
                # so its output handling differs from the normal branch
                out, err, ret = ub.cmd('sudo ' + command)
            if verbose > 1:
                print('ret(%d) = %r' % (count, ret,))
            if ret != 0:
                if error == 'raise':
                    raise Exception('Failed command %r' % (command,))
                elif error == 'return':
                    return out
                else:
                    raise ValueError('unknown flag error=%r' % (error,))
        if return_out:
            return out
    if not dry:
        print('L____')
def test_xdoc_cli_version():
    """
    CommandLine:
        python -m xdoctest -m ~/code/xdoctest/testing/test_entry_point.py test_xdoc_cli_version
    """
    import sys
    if sys.platform.startswith('win32'):
        pytest.skip()

    import xdoctest
    from xdoctest import __main__
    print('xdoctest = {!r}'.format(xdoctest))
    print('__main__ = {!r}'.format(__main__))
    # in-process invocation of the CLI entry point
    retcode = __main__.main(argv=['--version'])
    print('retcode = {!r}'.format(retcode))
    assert retcode == 0

    print('xdoctest = {!r}'.format(xdoctest))
    # prefer ubelt for the subprocess call, fall back to the local helper
    try:
        import ubelt as ub
    except ImportError:
        info = cmd(sys.executable + ' -m xdoctest --version')
    else:
        info = ub.cmd(sys.executable + ' -m xdoctest --version')
    print('info = {!r}'.format(info))
    print('xdoctest.__version__ = {!r}'.format(xdoctest.__version__))
    assert xdoctest.__version__ in info['out']
def lspci():
    """
    list all PCI devices

    lspci is a utility for displaying information about PCI buses in the
    system and devices connected to them.

    References:
        https://diego.assencio.com/?index=649b7a71b35fc7ad41e03b6d0e825f07

    Returns:
        List[Dict]: each dict is an item that contains keys:
            'Slot', 'Class', 'Vendor', 'Device', 'SVendor', 'SDevice', 'Rev'

    Example:
        items = lspci()
        [item['Class'] for item in items]
    """
    import re
    info = ub.cmd('lspci -vmm')
    # records in -vmm (machine readable) mode are separated by blank lines
    parts = re.split('\n *\n', info['out'])
    items = []
    for part in parts:
        part = part.strip()
        if part:
            # ROBUSTNESS: split each "Key:\tValue" line only on the first
            # separator so a value containing ':\t' cannot yield a 3-tuple
            # and break the dict construction.
            item = dict([line.split(':\t', 1) for line in part.split('\n')])
            items.append(item)
    return items
def session_info(dpath):
    """
    Stats about a training session

    Args:
        dpath (str): path to a single training-session directory

    Returns:
        dict: keys name, linked, dpath, num_snapshots, size, and
            last_modified (only when snapshots exist)
    """
    info = {}
    snap_dpath = join(dpath, 'torch_snapshots')
    # conditional expression: listdir only runs when the directory exists
    snapshots = os.listdir(snap_dpath) if exists(snap_dpath) else []
    dpath = realpath(dpath)

    if True:
        # Determine if we are pointed to by a "name" directory or not
        name = basename(dirname(dpath))
        info['name'] = name
        fitdir = dirname(dirname(dirname(dpath)))
        name_dpath = join(fitdir, 'name', name)
        try:
            target = realpath(ub.util_links._readlink(name_dpath))
        except Exception:
            # broken or missing symlink means this session is not "named"
            target = None
        info['linked'] = (target == dpath)

    info['dpath'] = dpath
    info['num_snapshots'] = len(snapshots)
    # size as reported by `du -s` (first tab-separated field)
    info['size'] = float(ub.cmd('du -s ' + dpath)['out'].split('\t')[0])
    if len(snapshots) > 0:
        # newest modification time among the session's direct children
        contents = [join(dpath, c) for c in os.listdir(dpath)]
        timestamps = [get_file_info(c)['last_modified'] for c in contents]
        unixtime = max(timestamps)
        dt = datetime.datetime.fromtimestamp(unixtime)
        info['last_modified'] = dt
    return info
def _git_sync(host, remote=None, dry=False): cwd = os.getcwd() relpwd = relpath(cwd, expanduser('~')) parts = [ 'git commit -am "wip"', ] if remote: parts += [ 'git push {remote}', 'ssh {host} "cd {relpwd} && git pull {remote}"' ] else: parts += ['git push', 'ssh {host} "cd {relpwd} && git pull"'] kw = dict(host=host, relpwd=relpwd, remote=remote) for part in parts: command = part.format(**kw) if not dry: result = ub.cmd(command, verbose=2) retcode = result['ret'] if command.startswith('git commit') and retcode == 1: pass elif retcode != 0: print('git-sync cannot continue. retcode={}'.format(retcode)) break else: print(command)
def motherboard_info():
    """
    REQUIRES SUDO

    Parse ``dmidecode -t 9`` output to summarize the motherboard PCIE slots.

    xdoctest -m ~/misc/notes/buildapc.py motherboard_info
    """
    import re
    info = ub.cmd('sudo dmidecode -t 9')
    pcie_slots = []
    # dmidecode separates records with blank lines
    chunks = info['out'].split('\n\n')
    for chunk in chunks:
        item = {}
        for line in chunk.split('\n'):
            # doesn't get all data correctly (e.g. characteristics)
            parts = re.split('\t*:', line, maxsplit=1)
            if len(parts) == 2:
                key, val = parts
                key = key.strip()
                val = val.strip()
                if key in item:
                    raise KeyError(f'key={key} already exists')
                item[key] = val
        if item:
            item = ub.map_keys(slugify_key, item)
            pcie_slots.append(item)

    # histogram of slot usage states
    pcie_usage = ub.dict_hist(item['current_usage'] for item in pcie_slots)

    # split attributes into those identical across slots vs those that differ
    _varied = varied_values(pcie_slots, min_variations=0)
    _varied = ub.map_keys(slugify_key, _varied)
    unvaried = {k: ub.peek(v) for k, v in _varied.items() if len(v) == 1}
    varied = {k: v for k, v in _varied.items() if len(v) > 1}

    # NOTE(review): pcie_usage / unvaried / varied are computed but neither
    # returned nor printed — only the raw dmidecode output is shown
    print(info['out'])
def _query_nvidia_smi(mode, fields):
    """
    Runs nvidia smi in query mode

    Args:
        mode (str): the query cli flag to pass to nvidia-smi
        fields (List[str]): csv header fields to query

    Returns:
        List[Dict[str, str]]: parsed csv output
    """
    command = [
        'nvidia-smi',
        '--{}={}'.format(mode, ','.join(fields)),
        '--format=csv,noheader',
    ]
    info = ub.cmd(command)
    if info['ret'] != 0:
        print(info['out'])
        print(info['err'])
        raise Exception('unable to call nvidia-smi: ret={}'.format(
            info['ret']))
    rows = []
    # one csv row per device; pair each cell with its requested field name
    for raw_line in info['out'].split('\n'):
        stripped = raw_line.strip()
        if not stripped:
            continue
        cells = [cell.strip() for cell in stripped.split(',')]
        rows.append(ub.dzip(fields, cells))
    return rows
def initialize_docs():
    """
    Invoke sphinx-apidoc to bootstrap the ./docs directory for this package.
    """
    from os.path import join
    import setup
    import ubelt as ub
    setupkw = setup.setupkw
    full_version = setup.parse_version()
    # e.g. '1.2.3' -> '1.2'
    short_version = '.'.join(full_version.split('.')[0:2])
    doc_dpath = join('.', 'docs')
    args = [
        'sphinx-apidoc',
        '--force',
        '--full',
        '--output-dir="{}"'.format(doc_dpath),
        '--doc-author="{}"'.format(setupkw['author']),
        '--doc-version="{}"'.format(short_version),
        '--doc-release="{}"'.format(full_version),
        '--maxdepth="8"',
        # '--ext-autodoc',
        # '--ext-ifconfig',
        # '--ext-githubpages',
        # '--ext-mathjax',
        setupkw['name'],
    ]
    result = ub.cmd(' '.join(args), verbose=2)
    assert result['ret'] == 0
def barron_mwe_error_unicode_literal():
    """
    Minimal working example demonstrating that redbaron fails to parse
    identifiers containing unicode symbols, regardless of unicode_literals.
    The code below is intentionally left exactly as the bug report requires.
    """
    import textwrap
    import redbaron
    # This works properly
    text = textwrap.dedent(
        '''
        p1, p2 = (1, 2)
        ''').strip('\n')
    red = redbaron.RedBaron(text)

    # But this fails when we use unicode symbols for identifiers
    text = textwrap.dedent(
        '''
        ρ1, ρ2 = (1, 2)
        ''').strip('\n')
    red = redbaron.RedBaron(text)

    # Still fails with a single unicode element
    text = textwrap.dedent(
        '''
        ρ2 = 2
        ''').strip('\n')
    red = redbaron.RedBaron(text)

    # Still fails with different unicode identifiers even with explicit
    # unicode literal futures
    text = textwrap.dedent(
        '''
        from __future__ import unicode_literals
        θ = 2
        ''').strip('\n')
    red = redbaron.RedBaron(text)

    # System information
    import sys
    print('sys.version_info = {!r}'.format(sys.version_info))
    import ubelt as ub
    # report the installed redbaron version alongside the failure
    _ = ub.cmd('pip list | grep redbaron', shell=True, verbose=1)
def test_startup_time():
    """
    Benchmark interpreter startup for several python binaries and flag
    combinations by timing ``python -c "import argparse"``.
    """
    import ubelt as ub
    executables = [
        '/usr/bin/python2',
        '/usr/bin/python3',
        # '/home/joncrall/venv3.6/bin/python3',
        '/home/joncrall/.local/conda/envs/py36/bin/python',
    ]
    for exe in executables:
        print('\n==========\nexe = {!r}'.format(exe))
        for opts in ['', '-B', '-S', '-OO']:
            command = exe + ' ' + opts + ' -c "import argparse"'
            for timer in ub.Timerit(50, bestof=5, label=repr(command),
                                    verbose=1):
                with timer:
                    ub.cmd(command)
def download_phase1_annots():
    """
    Download and unpack the phase-1 annotation tarball from girder.

    References:
        http://www.viametoolkit.org/cvpr-2018-workshop-data-challenge/challenge-data-description/
        https://challenge.kitware.com/api/v1/item/5ac385f056357d4ff856e183/download
        https://challenge.kitware.com/girder#item/5ac385f056357d4ff856e183

    CommandLine:
        python ~/code/baseline-viame-2018/viame_wrangler/config.py download_phase0_annots --datadir=~/data
    """
    cfg = Config({'datadir': '~/data/viame-challenge-2018'})
    dpath = ub.truepath(cfg.datadir)
    fname = 'phase1-annotations.tar.gz'
    # IDIOM: renamed from ``hash`` to stop shadowing the builtin
    item_hash = '5ac385f056357d4ff856e183'
    url = 'https://challenge.kitware.com/api/v1'
    # FIXME: broken
    dest = _grabdata_girder(dpath, fname, item_hash, url, force=False)

    unpacked = join(dpath, fname.split('.')[0])
    if not os.path.exists(unpacked):
        info = ub.cmd('tar -xvzf "{}" -C "{}"'.format(dest, dpath),
                      verbose=2, verbout=1)
        assert info['ret'] == 0
def build_info(session):
    """
    Stats about a training session

    Args:
        session: session object providing info, details, dpath, is_linked,
            and checkpoints

    Returns:
        Tuple[dict, dict]: the updated (info, details) mappings
    """
    info = session.info
    info['linked'] = session.is_linked
    dpath = realpath(session.dpath)
    best_snapshot_fpath = join(dpath, 'best_snapshot.pt')

    details = session.details
    details['best_snapshot'] = best_snapshot_fpath if exists(
        best_snapshot_fpath) else None
    # deployed zipfiles; symlinks are skipped so linked deployments dont count
    details['deployed'] = [
        p for p in glob.glob(join(dpath, '*.zip'))
        if not ub.util_links.islink(p)
    ]
    details['checkpoints'] = session.checkpoints

    info['dpath'] = dpath
    info['has_deploy'] = bool(details['deployed'])
    info['has_best'] = bool(details['best_snapshot'])
    info['num_snapshots'] = len(details['checkpoints'])
    # size as reported by `du -s` (first tab-separated field)
    info['size'] = float(ub.cmd('du -s ' + dpath)['out'].split('\t')[0])
    if len(details['checkpoints']) > 0:
        # newest modification time among the session's direct children
        contents = [join(dpath, c) for c in os.listdir(dpath)]
        timestamps = [
            get_file_info(c)['last_modified'] for c in contents if exists(c)
        ]
        unixtime = max(timestamps)
        dt = datetime.datetime.fromtimestamp(unixtime)
        info['last_modified'] = dt
    return info, details
def short_status(repo):
    r"""
    Print a one-line colored summary of ``git status`` for this repo.

    Example:
        >>> repo = Repo(dpath=ub.truepath('.'))
        >>> result = repo.short_status()
        >>> print(result)
    """
    import ubelt as ub
    prefix = repo.dpath
    with ChdirContext(repo.dpath, verbose=False):
        info = ub.cmd('git status', verbose=False)
        out = info['out']

    # parse git status
    is_clean_msg1 = 'Your branch is up-to-date with'
    is_clean_msgs = [
        'nothing to commit, working directory clean',
        'nothing to commit, working tree clean',
    ]
    msg2 = 'nothing added to commit but untracked files present'
    needs_commit_msgs = [
        'Changes to be committed',
        'Changes not staged for commit',
        'Your branch is ahead of',
    ]

    # build the colored summary from whichever status messages appear
    suffix = ''
    if is_clean_msg1 in out and any(m in out for m in is_clean_msgs):
        suffix += ub.color_text('is clean', 'blue')
    if msg2 in out:
        suffix += ub.color_text('has untracked files', 'yellow')
    if any(m in out for m in needs_commit_msgs):
        suffix += ub.color_text('has changes', 'red')
    print(prefix + ' ' + suffix)
def main():
    """
    Checks that the latest wheels on pypi agree with the gpg key
    """
    import requests
    import ubelt as ub
    package_name = 'ubelt'
    url = "https://pypi.python.org/pypi/{}/json".format(package_name)
    package = requests.get(url).json()
    max_ver = max(package["releases"].keys())
    # ... check compatibility
    for wheel_info in package['releases'][max_ver]:
        whl_fpath = ub.grabdata(
            wheel_info['url'],
            hash_prefix=wheel_info['digests']['sha256'],
            hasher='sha256')
        if not wheel_info['has_sig']:
            raise ValueError('info says no sig')
        # the detached gpg signature lives next to the wheel
        sig_fpath = ub.download(wheel_info['url'] + '.asc', )
        info = ub.cmd('gpg --verify {} {}'.format(sig_fpath, whl_fpath),
                      verbose=3)
        assert info['ret'] == 0
def ensure_voc_data(VOCDataset, dpath=None, force=False, years=[2007, 2012]):
    """
    Download the Pascal VOC 2007 data if it does not already exist.

    Args:
        VOCDataset: dataset class this staticmethod-style helper belongs to
        dpath (str | None): data directory, defaults to ~/data/VOC
        force (bool): if True re-download and re-extract even when present
        years (List[int]): which VOC challenge years to fetch

    Returns:
        str: path to the VOCdevkit directory

    CommandLine:
        python -m netharn.data.voc VOCDataset.ensure_voc_data

    Example:
        >>> # SCRIPT
        >>> # xdoc: +REQUIRES(--voc)
        >>> from netharn.data.voc import *  # NOQA
        >>> VOCDataset.ensure_voc_data()
    """
    if dpath is None:
        dpath = ub.expandpath('~/data/VOC')
    devkit_dpath = join(dpath, 'VOCdevkit')
    # if force or not exists(devkit_dpath):
    ub.ensuredir(dpath)

    # devkit code (common to all years)
    fpath1 = ub.grabdata(
        'http://host.robots.ox.ac.uk/pascal/VOC/voc2007/VOCdevkit_08-Jun-2007.tar',
        dpath=dpath)
    if force or not exists(join(dpath, 'VOCdevkit', 'VOCcode')):
        ub.cmd('tar xvf "{}" -C "{}"'.format(fpath1, dpath), verbout=1)

    if 2007 in years:
        # VOC 2007 train+validation data
        fpath2 = ub.grabdata(
            'http://host.robots.ox.ac.uk/pascal/VOC/voc2007/VOCtrainval_06-Nov-2007.tar',
            dpath=dpath)
        # presence of one known file is used as the "already extracted" marker
        if force or not exists(
                join(dpath, 'VOCdevkit', 'VOC2007', 'ImageSets', 'Main',
                     'bird_trainval.txt')):
            ub.cmd('tar xvf "{}" -C "{}"'.format(fpath2, dpath), verbout=1)

        # VOC 2007 test data
        fpath3 = ub.grabdata(
            'http://host.robots.ox.ac.uk/pascal/VOC/voc2007/VOCtest_06-Nov-2007.tar',
            dpath=dpath)
        if force or not exists(
                join(dpath, 'VOCdevkit', 'VOC2007', 'ImageSets', 'Main',
                     'bird_test.txt')):
            ub.cmd('tar xvf "{}" -C "{}"'.format(fpath3, dpath), verbout=1)

    if 2012 in years:
        # VOC 2012 train+validation data
        fpath4 = ub.grabdata(
            'https://pjreddie.com/media/files/VOCtrainval_11-May-2012.tar',
            dpath=dpath)
        if force or not exists(
                join(dpath, 'VOCdevkit', 'VOC2012', 'ImageSets', 'Main',
                     'bird_trainval.txt')):
            ub.cmd('tar xvf "{}" -C "{}"'.format(fpath4, dpath), verbout=1)
    return devkit_dpath
def main():
    """
    Compare packages listed in requirements.txt against the current
    environment (``pip freeze``): report which pins are satisfied and which
    are unknown, then print a freshly pinned requirements listing.
    """
    import ubelt as ub
    want = {}
    versioned = {}
    unversioned = []
    gitversioned = {}
    for line in set(open('requirements.txt').read().split('\n')):
        line = line.strip()
        if not line.startswith('#'):
            # BUGFIX: these cases were independent ``if`` statements with the
            # final ``else`` attached only to the '#egg=' check, so a line
            # like 'pkg==1.0' was parsed correctly and then immediately
            # clobbered with key=line, version=None. A single if/elif/else
            # chain parses each line exactly once.
            if '==' in line:
                key, version = line.split('==')
            elif '>=' in line:
                key, version = line.split('>=')
            elif '#egg=' in line:
                version, key = line.split('#egg=')
            else:
                key = line
                version = None
            if key:
                want[key] = version
    print('want ' + ub.repr2(want, si=True))

    # parse the installed environment
    out = ub.cmd('pip freeze')['out']
    for line in out.split('\n'):
        line = line.strip()
        item = line.split('==')
        if len(item) == 2:
            key, version = item
            versioned[key] = version
        else:
            if '#egg=' in line:
                # editable VCS install: "-e git+...@<hash>#egg=<name>"
                githash, key = line.split('#egg=')
                gitversioned[key] = githash
            else:
                if line:
                    unversioned.append(line)
    versioned.update(gitversioned)
    # print('versioned ' + ub.repr2(versioned, si=True))
    # print('gitversioned ' + ub.repr2(gitversioned, si=True))
    # print('unversioned ' + ub.repr2(unversioned, si=True))

    unknown = set(want) - set(versioned.keys())
    if unknown:
        print('unknown = {}'.format(ub.repr2(unknown, si=True)))

    have = ub.dict_subset(versioned,
                          set(versioned.keys()).intersection(set(want)))
    print('have ' + ub.repr2(have, si=True))

    # emit pinned lines; editable installs keep their VCS form
    outlines = []
    for key, value in have.items():
        if value.startswith('-e'):
            outlines.append('{}#egg={}'.format(value, key))
        else:
            outlines.append('{}=={}'.format(key, value))
    print('\n'.join(sorted(outlines)))
def git_fix_upstream(remote, dry=False):
    """
    Point the current branch's upstream at the same-named branch on
    ``remote``.

    Args:
        remote (str): name of an existing git remote
        dry (bool): if True, only print the command
    """
    repo = git.Repo()
    branch = repo.active_branch.name
    assert remote in [m.name for m in repo.remotes]
    # BUGFIX: '-u' and '--set-upstream-to' are the same git option; passing
    # both produced a malformed command where '-u' consumed
    # '--set-upstream-to=...' as its argument.
    comment_fmt = 'git branch --set-upstream-to={remote}/{branch} {branch}'
    command = comment_fmt.format(remote=remote, branch=branch)
    if dry:
        print(command)
    else:
        import ubelt as ub
        ub.cmd(command, verbose=2)
    # TODO:
    # if the command fails simply add the right lines to .git/config
    # NOTE(review): the original snippet ended with a stray ''' here, which
    # looks like an extraction artifact and was dropped.
def test_cli(): """ Test command line interaction with kernprof and line_profiler. References: https://github.com/pyutils/line_profiler/issues/9 CommandLine: xdoctest -m ~/code/line_profiler/tests/test_cli.py test_cli """ # Create a dummy source file import ubelt as ub code = ub.codeblock(''' @profile def my_inefficient_function(): a = 0 for i in range(10): a += i for j in range(10): a += j if __name__ == '__main__': my_inefficient_function() ''') import tempfile tmp_dpath = tempfile.mkdtemp() tmp_src_fpath = join(tmp_dpath, 'foo.py') ub.writeto(tmp_src_fpath, code) # Run kernprof on it info = ub.cmd('kernprof -l {}'.format(tmp_src_fpath), verbose=3, cwd=tmp_dpath) assert info['ret'] == 0 tmp_lprof_fpath = join(tmp_dpath, 'foo.py.lprof') tmp_lprof_fpath info = ub.cmd('{} -m line_profiler {}'.format(executable, tmp_lprof_fpath), cwd=tmp_dpath, verbose=3) assert info['ret'] == 0 # Check for some patterns that should be in the output assert '% Time' in info['out'] assert '7 100' in info['out']
def test_cmd_tee_auto():
    """
    pytest ubelt/tests/test_cmd.py -k tee_backend
    pytest ubelt/tests/test_cmd.py
    """
    # the auto backend must capture every line of stdout
    expected = ''.join('{}\n'.format(i) for i in range(100))
    result = ub.cmd('python -c "for i in range(100): print(str(i))"',
                    verbose=0, tee_backend='auto')
    assert result['out'] == expected
def test_cmd_multiline_stdout():
    """
    python ubelt/tests/test_cmd.py test_cmd_multiline_stdout
    pytest ubelt/tests/test_cmd.py::test_cmd_multiline_stdout
    """
    # every printed line must be captured, newlines included
    expected = ''.join('{}\n'.format(i) for i in range(10))
    result = ub.cmd('python -c "for i in range(10): print(str(i))"',
                    verbose=0)
    assert result['out'] == expected
def run(py):
    """
    Execute the cache-access script inside the environment selected by ``py``.
    """
    # chain env activation and the script invocation inside one bash -c
    steps = [
        'source $HOME/.bashrc',
        activate_cmds[py],
        'python {} access_cache'.format(script),
    ]
    sh_cmd = 'bash -c "{}"'.format(' && '.join(steps))
    info = ub.cmd(sh_cmd, shell=True, verbose=3)
    return info
def setup_git_scripts():
    """
    Generate executable wrapper scripts for every git_*/github_* helper in
    this directory.
    """
    dpath = dirname(__file__)
    # process .py helpers first, then .sh, then github helpers (same order as
    # the original concatenation)
    git_scripts = []
    for pattern in ('git_*.py', 'git_*.sh', 'github_*.py'):
        git_scripts.extend(glob.glob(join(dpath, pattern)))
    for fpath in git_scripts:
        fname = basename(fpath)
        script_text = (SCRIPT_HEADER + '\n\n' +
                       SCRIPT_FOOTER_FMT.format(fname=fname) + '\n')
        # git_foo.py -> git-foo
        new_fname = splitext(fname)[0].replace('_', '-')
        new_fpath = join(dpath, 'scripts', new_fname)
        print('writing script {!r}'.format(new_fname))
        ub.writeto(new_fpath, script_text)
        ub.cmd('chmod +x ' + new_fpath)
        ub.cmd('chmod +x ' + fpath)
def github_add_fork(new_user, dry=False):
    """
    Add a git remote pointing at ``new_user``'s fork of this repository.
    """
    out = ub.cmd('git remote -v')['out']
    # each line is "<name>\t<url> (fetch|push)"; keep just "<name>\t<url>"
    name_url_pairs = [row.split(' ')[0] for row in out.split('\n') if row]
    remote_to_url = dict([pair.split('\t') for pair in name_url_pairs])
    server = None
    known_servers = [
        'github.com',
        'gitlab.com',
    ]
    repo_names = set()
    for remote, url in remote_to_url.items():
        if new_user == remote:
            raise Exception('User already exists')
        for _server in known_servers:
            if _server in url:
                # after the server the url path is "<user>/<repo>"
                user, gitname = url.split(_server)[1][1:].split('/')
                repo_names.add(gitname)
                server = _server
    # every existing remote must agree on the repository name
    if len(repo_names) != 1:
        raise Exception('Conflicting repo names')
    gitname = list(repo_names)[0]
    if server is None:
        raise Exception('Unknown server')
    new_url = 'https://{}/{}/{}'.format(server, new_user, gitname)
    command = 'git remote add {} {}'.format(new_user, new_url)
    if dry:
        print('Dry run: would execute: {}'.format(command))
    else:
        ub.cmd(command, verbose=3)
def ldd(lib_fpath):
    """
    Yield the file paths of the shared libraries that ``lib_fpath`` links
    against, as reported by the ``ldd`` command line tool.

    Args:
        lib_fpath (str): path to a shared library or executable

    Yields:
        str: path (as printed by ldd) of each linked library

    Raises:
        ValueError: if an output line matches none of the known ldd formats
    """
    # `parse` library patterns covering the known shapes of ldd output lines
    patterns = [
        '\t{link:S}{:s}=>{:s}{abspath:S}{:s}({addr:S})',
        '\t{abspath:S}{:s}=>{:s}({addr:S})',
        '\t{abspath:S}{:s}=>{:s}not found',
        '\t{abspath:S}{:s}({addr:S})',
    ]
    lines = ub.cmd('ldd ' + lib_fpath)['out'].splitlines()
    for line in lines:
        # statically linked binaries have no dynamic dependencies
        if 'statically linked' in line:
            continue
        # try each pattern until one matches
        for pattern in patterns:
            result = parse.parse(pattern, line)
            if result is not None:
                break
        if result is None:
            raise ValueError(repr(line))
        child_lib_fpath = result.named['abspath']
        yield child_lib_fpath
def _git_sync(host, remote=None, dry=False, forward_ssh_agent=False,
              message='wip'):
    """
    Commit local changes and replay them on ``host`` via ssh + git pull.

    Args:
        host (str): ssh host to sync with
        remote (str | None): optional git remote to push/pull through
        dry (bool): if True print the commands instead of running them
        forward_ssh_agent (bool): if True pass ``-A`` to ssh
        message (str): commit message for the wip commit
    """
    cwd = _getcwd()
    # assume the repo lives at the same home-relative path on the remote
    relpwd = relpath(cwd, expanduser('~'))
    parts = [
        'git commit -am "{}"'.format(message),
    ]
    if remote:
        parts += [
            'git push {remote}',
            # BUGFIX: the ssh flags (e.g. -A for agent forwarding) were only
            # applied in the no-remote branch; apply them here too so
            # forward_ssh_agent is honored either way.
            'ssh {ssh_flags} {host} "cd {relpwd} && git pull {remote}"'
        ]
    else:
        parts += [
            'git push',
            'ssh {ssh_flags} {host} "cd {relpwd} && git pull"'
        ]
    ssh_flags = []
    if forward_ssh_agent:
        ssh_flags += ['-A']
    ssh_flags = ' '.join(ssh_flags)
    kw = dict(host=host, relpwd=relpwd, remote=remote, ssh_flags=ssh_flags)
    for part in parts:
        command = part.format(**kw)
        if not dry:
            result = ub.cmd(command, verbose=2)
            retcode = result['ret']
            if command.startswith('git commit') and retcode == 1:
                # "nothing to commit" exits 1; not an error for our purposes
                pass
            elif retcode != 0:
                print('git-sync cannot continue. retcode={}'.format(retcode))
                break
        else:
            print(command)
def do_tags(verbose=True, inplace=False, dry=True, auto_rollback=False):
    """
    Squash the commits between each pair of consecutive version tags.

    Builds (tag_i, tag_i+1) "streaks" from the repo's tags sorted by
    version, then squashes the commits inside each streak on a temporary
    branch. With ``inplace`` the original branch is reset to the squashed
    result; otherwise the temp branch is left for manual inspection.

    Args:
        verbose (bool): print progress information
        inplace (bool): if True, reset the original branch to the squashed
            temp branch and delete the temp branch
        dry (bool): if True, do not modify the repo; only report
        auto_rollback (bool): if True and an error occurs (non-dry), check
            the original branch back out
    """
    if verbose:
        if dry:
            print('squashing streaks (DRY RUN)')
        else:
            print('squashing streaks')
    # print('authors = {!r}'.format(authors))
    # If you are in a repo subdirectory, find the repo root
    cwd = os.getcwd()
    repodir = cwd
    while True:
        if os.path.exists(os.path.join(repodir, '.git')):
            break
        newpath = os.path.dirname(repodir)
        # Reached the filesystem root without finding a .git dir
        if newpath == repodir:
            raise git.exc.InvalidGitRepositoryError(cwd)
        repodir = newpath
    repo = git.Repo(repodir)
    orig_branch_name = repo.active_branch.name
    # head = repo.commit('HEAD')
    # Tags in version order, and a mapping from tag name to commit hash
    info = ub.cmd('git tag -l --sort=v:refname', verbose=3)
    info2 = ub.cmd('git show-ref --tags', verbose=3)
    tag_to_hash = {}
    for line in info2['out'].splitlines():
        if line:
            # show-ref lines look like "<hash> refs/tags/<tag>"
            hashtext, tags = line.split(' ')
            tag = tags.replace('refs/tags/', '')
            tag_to_hash[tag] = hashtext
    print('tag_to_hash = {!r}'.format(tag_to_hash))
    tag_order = [line for line in info['out'].splitlines() if line]
    # Each sliding pair of consecutive tags defines a candidate streak
    custom_streaks = list(ub.iter_window(tag_order, 2))
    print('Forcing hacked steaks')
    print('custom_streaks = {!r}'.format(custom_streaks))
    streaks = []
    for custom_streak in custom_streaks:
        print('custom_streak = {!r}'.format(custom_streak))
        assert len(custom_streak) == 2
        hash_a = tag_to_hash[custom_streak[0]]
        hash_b = tag_to_hash[custom_streak[1]]
        a = repo.commit(hash_a)
        b = repo.commit(hash_b)
        # Orient the pair so `a` is the descendant and `b` the ancestor
        if repo.is_ancestor(ancestor_rev=a, rev=b):
            a, b = b, a
        # assert repo.is_ancestor(ancestor_rev=b, rev=a)
        streak = Streak(a, _streak=[a, b])
        # Skip streaks whose start is a merge (or root) commit — squashing
        # across those is not handled
        if len(streak.start.parents) != 1:
            print('WARNING: cannot include streak = {!r}'.format(streak))
            continue
        # assert start.authored_datetime < stop.authored_datetime
        if not repo.is_ancestor(ancestor_rev=streak.start, rev=streak.stop):
            print('WARNING: cannot include streak = {!r}'.format(streak))
            continue
            # raise AssertionError('cant handle')
        streaks.append(streak)
    if verbose:
        print('Found {!r} streaks'.format(len(streaks)))
    # Switch to a temp branch before we start working
    if not dry:
        temp_branchname = checkout_temporary_branch(repo, '-squash-temp')
    else:
        temp_branchname = None
    try:
        for streak in ub.ProgIter(streaks, 'squashing', verbose=3 * verbose):
            if verbose:
                print('Squashing streak = %r' % (str(streak),))
            # Start is the commit further back in time
            _squash_between(repo, streak.start, streak.stop, dry=dry, verbose=verbose)
    except Exception as ex:
        print_exc(sys.exc_info())
        print('ERROR: squash_streaks failed.')
        if not dry and auto_rollback:
            print('ROLLING BACK')
            repo.git.checkout(orig_branch_name)
        # repo.git.branch(D=temp_branchname)
        print('You can debug the difference with:')
        print(' gitk {} {}'.format(orig_branch_name, temp_branchname))
        return
    if dry:
        if verbose:
            print('Finished. did nothing')
    elif inplace:
        # Copy temp branch back over original
        repo.git.checkout(orig_branch_name)
        repo.git.reset(temp_branchname, hard=True)
        repo.git.branch(D=temp_branchname)
        if verbose:
            print('Finished. Now you should force push the branch back to the server')
    else:
        # Go back to the original branch; leave the temp branch around for
        # the user to inspect and clean up manually
        repo.git.checkout(orig_branch_name)
        if verbose:
            print('Finished')
            print('The squashed branch is: {}'.format(temp_branchname))
            print('You can inspect the difference with:')
            print(' gitk {} {}'.format(orig_branch_name, temp_branchname))
            print('Finished. Now you must manually clean this branch up.')
            print('Or, to automatically accept changes run with --inplace')
def main():
    """
    Clone, update, and tidy the vim bundle repos declared in REPOS1.

    Clones any missing repos from ``REPOS1.VIM_REPO_URLS`` into the bundle
    directory, optionally pulls all existing bundle repos (``--pull``),
    initializes submodules for repos known to need them, and prints
    suggested cleanup commands for non-git directories found in the bundle.
    """
    # sudo apt-get install -y exuberant-ctags
    if sys.platform.startswith('win32'):
        try:
            ensure_ctags_win32()
        except Exception:
            # Best-effort: a missing ctags.exe should not abort the sync
            print('failed to get ctags.exe for win32')
            pass
    import REPOS1
    import util_git1
    from meta_util_git1 import get_repo_dirs
    from meta_util_git1 import cd
    BUNDLE_DPATH = REPOS1.BUNDLE_DPATH
    ensuredir(BUNDLE_DPATH)
    VIM_REPOS_WITH_SUBMODULES = REPOS1.VIM_REPOS_WITH_SUBMODULES
    VIM_REPO_URLS = REPOS1.VIM_REPO_URLS
    VIM_REPO_DIRS = get_repo_dirs(VIM_REPO_URLS, BUNDLE_DPATH)
    # All modules in the bundle dir (even if not listed)
    import os
    BUNDLE_DIRS = [join(BUNDLE_DPATH, name) for name in os.listdir(BUNDLE_DPATH)]
    cd(BUNDLE_DPATH)
    print('VIM_REPO_DIRS = {!r}'.format(VIM_REPO_DIRS))
    print('VIM_REPO_URLS = {!r}'.format(VIM_REPO_URLS))
    # util_git1.checkout_repos(VIM_REPO_URLS, VIM_REPO_DIRS)
    import ubelt as ub
    # Clone any declared repos that do not exist on disk yet
    for repodir, repourl in zip(VIM_REPO_DIRS, VIM_REPO_URLS):
        print('[git] checkexist: ' + repodir)
        if not exists(repodir):
            cd(dirname(repodir))
            ub.cmd('git clone ' + repourl, verbose=3)
    __NOT_GIT_REPOS__ = []
    __BUNDLE_REPOS__ = []
    for repodir in BUNDLE_DIRS:
        # Mark which repos do not have .git dirs
        if not util_git1.is_gitrepo(repodir):
            __NOT_GIT_REPOS__.append(repodir)
        else:
            __BUNDLE_REPOS__.append(repodir)
    if ub.argflag('--pull'):
        util_git1.pull_repos(__BUNDLE_REPOS__, VIM_REPOS_WITH_SUBMODULES)
    ensure_nongit_plugins()
    # Print suggestions for removing nonbundle repos
    if len(__NOT_GIT_REPOS__) > 0:
        print('Please fix these nongit repos: ')
        print('\n'.join(__NOT_GIT_REPOS__))
        print('maybe like this: ')
        clutterdir = util_git1.unixpath('~/local/vim/vimfiles/clutter')
        suggested_cmds = (
            ['mkdir ' + clutterdir] +
            ['mv ' + util_git1.unixpath(dir_) + ' ' + clutterdir
             for dir_ in __NOT_GIT_REPOS__])
        print('\n'.join(suggested_cmds))
    # Hack for nerd fonts
    # """
    # cd ~/local/vim/vimfiles/bundle
    # # git clone https://github.com/ryanoasis/nerd-fonts.git --depth 1
    # https://github.com/ryanoasis/nerd-fonts/blob/master/patched-fonts/Inconsolata/complete/Inconsolata%20Nerd%20Font%20Complete.otf
    # https://github.com/ryanoasis/nerd-fonts/raw/master/patched-fonts/Inconsolata/complete/Inconsolata%20Nerd%20Font%20Complete.otf
    # """
    # HACK FOR JEDI
    import os
    # Repos with submodules need an explicit recursive submodule update;
    # fall back to os.system if ubelt is unavailable
    for repo in VIM_REPOS_WITH_SUBMODULES:
        cd(join(BUNDLE_DPATH, repo))
        command = 'git submodule update --init --recursive'
        try:
            import ubelt as ub
            ub.cmd(command, verbose=2)
        except Exception:
            os.system(command)