def get_tree_info(root, path, dpath_to_unique_fidx=dpath_to_unique_fidx, drive=drive, depth=0):
    """
    Summarize one level of the path tree rooted at `root`.

    Returns a sorted list of entries for the children of `path`:
    (name, human-readable size, file count) when depth == 0, with a
    fourth element containing the recursive summary otherwise.
    Leaf nodes (lists of filenames) yield an empty list.
    """
    # Descend the nested-dict tree to the node addressed by `path`.
    node = root
    for part in ut.dirsplit(path):
        node = node[part]
    if isinstance(node, list):
        # A list node is a leaf (file contents); nothing to summarize.
        return []
    entries = []
    for key in list(node.keys()):
        child = join(path, key)
        fidxs = dpath_to_unique_fidx.get(child, [])
        nbytes = drive.get_total_nbytes(fidxs)
        row = (key, ut.byte_str2(nbytes), len(fidxs))
        if depth != 0:
            # Recurse one level shallower; depth counts down to 0.
            subtree = get_tree_info(root, path=child,
                                    dpath_to_unique_fidx=dpath_to_unique_fidx,
                                    drive=drive, depth=depth - 1)
            row = row + (subtree,)
        entries.append(row)
    return sorted(entries)
def get_tree_info(root, path, dpath_to_unique_fidx=dpath_to_unique_fidx, drive=drive, depth=0):
    """
    Build a sorted per-child summary for `path` in the nested tree `root`.

    Each child yields (name, byte string, nfiles); when depth is nonzero
    a recursive summary (at depth - 1) is appended as a fourth element.
    List-valued nodes are leaves and produce an empty result.
    """

    def _lookup(tree, dpath):
        # Walk dict-of-dicts down to the node for dpath.
        cur = tree
        for comp in ut.dirsplit(dpath):
            cur = cur[comp]
        return cur

    def _summarize(name):
        child = join(path, name)
        fidxs = dpath_to_unique_fidx.get(child, [])
        base = (name, ut.byte_str2(drive.get_total_nbytes(fidxs)), len(fidxs))
        if depth == 0:
            return base
        return base + (get_tree_info(root, path=child,
                                     dpath_to_unique_fidx=dpath_to_unique_fidx,
                                     drive=drive, depth=depth - 1),)

    node = _lookup(root, path)
    if isinstance(node, list):
        return []
    return sorted(_summarize(name) for name in node.keys())
def print_tree(root, path, dpath_to_unique_fidx=dpath_to_unique_fidx, drive=drive, depth=None):
    """
    Print the total size of `path` and a truncated repr of its subtree.
    """
    print('path = %r' % (path,))
    print(ut.byte_str2(drive.get_total_nbytes(dpath_to_unique_fidx[path])))
    # Navigate to the correct spot in the nested-dict tree.
    node = root
    for component in ut.dirsplit(path):
        node = node[component]
    print(ut.repr3(node, truncate=1))
def print_tree(root, path, dpath_to_unique_fidx=dpath_to_unique_fidx, drive=drive, depth=None):
    """
    Dump summary info for `path`: its path, total byte size, and a
    truncated repr of the subtree stored under it in `root`.
    """
    print('path = %r' % (path, ))
    nbytes = drive.get_total_nbytes(dpath_to_unique_fidx[path])
    print(ut.byte_str2(nbytes))
    # Navigate to correct spot in tree
    subtree = root
    for part in ut.dirsplit(path):
        subtree = subtree[part]
    print(ut.repr3(subtree, truncate=1))
def _init_dirs(ibs, dbdir=None, dbname='testdb_1', workdir='~/ibeis_workdir', ensure=True):
    """
    Define ibs directories

    Populates all path attributes on the controller `ibs` rooted at
    <dbdir>/_ibsdb and optionally creates them on disk.

    Args:
        ibs: controller object whose path attributes are assigned
        dbdir (str): full path of the database directory (required)
        dbname (str): database name; overridden when dbdir is given
        workdir (str): parent work directory; overridden when dbdir is given
        ensure (bool): if True, create the directories via ibs.ensure_directories()
    """
    PATH_NAMES = const.PATH_NAMES
    REL_PATHS = const.REL_PATHS
    # BUGFIX: validate dbdir up front. The original asserted this at the
    # very end, after directories may already have been created on disk.
    assert dbdir is not None, 'must specify database directory'
    if not ut.QUIET:
        print('[ibs._init_dirs] ibs.dbdir = %r' % dbdir)
    workdir, dbname = split(dbdir)
    ibs.workdir = ut.truepath(workdir)
    ibs.dbname = dbname
    ibs.sqldb_fname = PATH_NAMES.sqldb
    ibs.sqlstaging_fname = PATH_NAMES.sqlstaging
    # Make sure you are not nesting databases.
    # BUGFIX: ut.dirsplit returns a list of path components, so the old
    # `!=` comparison of a string against the list was always True and the
    # assert could never fire; membership is the intended check.
    assert PATH_NAMES._ibsdb not in ut.dirsplit(ibs.workdir), \
        'cannot work in _ibsdb internals'
    assert PATH_NAMES._ibsdb != dbname, \
        'cannot create db in _ibsdb internals'
    ibs.dbdir = join(ibs.workdir, ibs.dbname)
    # All internal paths live in <dbdir>/_ibsdb
    # TODO: constantify these
    # so non controller objects (like in score normalization) have access
    # to these
    ibs._ibsdb = join(ibs.dbdir, REL_PATHS._ibsdb)
    ibs.trashdir = join(ibs.dbdir, REL_PATHS.trashdir)
    ibs.cachedir = join(ibs.dbdir, REL_PATHS.cache)
    ibs.backupdir = join(ibs.dbdir, REL_PATHS.backups)
    ibs.logsdir = join(ibs.dbdir, REL_PATHS.logs)
    ibs.chipdir = join(ibs.dbdir, REL_PATHS.chips)
    ibs.imgdir = join(ibs.dbdir, REL_PATHS.images)
    ibs.uploadsdir = join(ibs.dbdir, REL_PATHS.uploads)
    # All computed dirs live in <dbdir>/_ibsdb/_ibeis_cache
    ibs.thumb_dpath = join(ibs.dbdir, REL_PATHS.thumbs)
    ibs.flanndir = join(ibs.dbdir, REL_PATHS.flann)
    ibs.qresdir = join(ibs.dbdir, REL_PATHS.qres)
    ibs.bigcachedir = join(ibs.dbdir, REL_PATHS.bigcache)
    ibs.distinctdir = join(ibs.dbdir, REL_PATHS.distinctdir)
    if ensure:
        ibs.ensure_directories()
def make_tree_structure(valid_fpaths):
    """
    Build a nested-dict tree from a list of file paths.

    Each directory component maps to a sub-dict; the filenames directly
    inside a directory are collected in a list under the special key '.'.

    Args:
        valid_fpaths (list): file paths to insert into the tree

    Returns:
        dict: the root of the nested tree
    """
    root = {}
    for fpath in ut.ProgIter(valid_fpaths, 'building tree', freq=30000):
        path_components = ut.dirsplit(fpath)
        current = root
        # dict.setdefault replaces the hand-rolled try/except defaulting
        # helper in the original: create-on-first-access, then descend.
        for comp in path_components[:-1]:
            current = current.setdefault(comp, {})
        current.setdefault('.', []).append(path_components[-1])
    return root
def _init_dirs(ibs, dbdir=None, dbname='testdb_1', workdir='~/ibeis_workdir', ensure=True):
    """
    Define ibs directories

    Assigns all controller path attributes rooted at <dbdir>/_ibsdb and
    optionally ensures they exist on disk.

    Args:
        ibs: controller object whose path attributes are assigned
        dbdir (str): full path of the database directory (required)
        dbname (str): database name; overridden when dbdir is given
        workdir (str): parent work directory; overridden when dbdir is given
        ensure (bool): if True, create the directories via ibs.ensure_directories()
    """
    PATH_NAMES = const.PATH_NAMES
    REL_PATHS = const.REL_PATHS
    # BUGFIX: validate dbdir before doing any work. The original asserted
    # this at the very end, after directories may already have been created.
    assert dbdir is not None, 'must specify database directory'
    if not ut.QUIET:
        print('[ibs._init_dirs] ibs.dbdir = %r' % dbdir)
    workdir, dbname = split(dbdir)
    ibs.workdir = ut.truepath(workdir)
    ibs.dbname = dbname
    ibs.sqldb_fname = PATH_NAMES.sqldb
    # Make sure you are not nesting databases.
    # BUGFIX: ut.dirsplit returns a list of path components, so the old
    # `!=` comparison of a string against the list was always True and the
    # assert could never fire; membership is the intended check.
    assert PATH_NAMES._ibsdb not in ut.dirsplit(ibs.workdir), \
        'cannot work in _ibsdb internals'
    assert PATH_NAMES._ibsdb != dbname, \
        'cannot create db in _ibsdb internals'
    ibs.dbdir = join(ibs.workdir, ibs.dbname)
    # All internal paths live in <dbdir>/_ibsdb
    # TODO: constantify these
    # so non controller objects (like in score normalization) have access
    # to these
    ibs._ibsdb = join(ibs.dbdir, REL_PATHS._ibsdb)
    ibs.trashdir = join(ibs.dbdir, REL_PATHS.trashdir)
    ibs.cachedir = join(ibs.dbdir, REL_PATHS.cache)
    ibs.backupdir = join(ibs.dbdir, REL_PATHS.backups)
    ibs.chipdir = join(ibs.dbdir, REL_PATHS.chips)
    ibs.imgdir = join(ibs.dbdir, REL_PATHS.images)
    ibs.uploadsdir = join(ibs.dbdir, REL_PATHS.uploads)
    # All computed dirs live in <dbdir>/_ibsdb/_ibeis_cache
    ibs.thumb_dpath = join(ibs.dbdir, REL_PATHS.thumbs)
    ibs.flanndir = join(ibs.dbdir, REL_PATHS.flann)
    ibs.qresdir = join(ibs.dbdir, REL_PATHS.qres)
    ibs.bigcachedir = join(ibs.dbdir, REL_PATHS.bigcache)
    ibs.distinctdir = join(ibs.dbdir, REL_PATHS.distinctdir)
    if ensure:
        ibs.ensure_directories()
def find_packages(recursive=True, maxdepth=None):
    """ Finds all directories with an __init__.py file in them """
    import utool
    if utool.VERBOSE:
        print('[util_setup] find_packages(recursive=%r, maxdepth=%r)' %
              (recursive, maxdepth))
    from os.path import relpath
    cwd = os.getcwd()
    init_files = utool.glob(cwd, '__init__.py', recursive=recursive,
                            maxdepth=maxdepth)
    # Directories containing an __init__.py, relative to cwd.
    package_relpaths = [relpath(dirname(fpath), cwd) for fpath in init_files]
    packages = []
    for rel_dpath in package_relpaths:
        top = utool.dirsplit(rel_dpath)[0]
        # Only keep paths whose top-level directory is itself a package.
        if not exists(join(top, '__init__.py')):
            continue
        packages.append(rel_dpath.replace('/', '.').replace('\\', '.'))
    return packages