Exemple #1
0
def copy_ibeisdb(source_dbdir, dest_dbdir):
    # TODO: rectify with rsync, script, and merge script.
    from os.path import normpath
    import ibeis
    # Directories that must never be replicated into the new database
    raw_excludes = ibeis.const.EXCLUDE_COPY_REL_DIRS + ['_hsdb', '.hs_internals']
    exclude_dirs = [ut.ensure_unixslash(normpath(rel)) for rel in raw_excludes]

    # Relative paths of every file and directory that should be copied
    rel_files = ut.glob(source_dbdir, '*', exclude_dirs=exclude_dirs,
                        recursive=True, with_files=True, with_dirs=False,
                        fullpath=False)
    rel_dirs = ut.glob(source_dbdir, '*', exclude_dirs=exclude_dirs,
                       recursive=True, with_files=False, with_dirs=True,
                       fullpath=False)

    src_list = [join(source_dbdir, rel) for rel in rel_files]
    dst_list = [join(dest_dbdir, rel) for rel in rel_files]

    # Create the destination directory tree before copying any files
    for dpath in [dest_dbdir] + [join(dest_dbdir, rel) for rel in rel_dirs]:
        ut.ensuredir(dpath)
    # copy files
    ut.copy(src_list, dst_list)
Exemple #2
0
def revert_to_backup(ibs):
    r"""
    Revert the core and staging databases to their most recent backups.

    The current database files are kept on disk with a ``_revert`` suffix
    so the operation can be undone manually.  The controller cache is
    deleted afterwards because it may not match the reverted data.

    Args:
        ibs: controller whose core/staging databases are reverted

    CommandLine:
        python -m wbia.control._sql_helpers --exec-revert_to_backup

    Example:
        >>> # SCRIPT
        >>> from wbia.control._sql_helpers import *  # NOQA
        >>> import wbia
        >>> ibs = wbia.opendb(defaultdb='elephants')
        >>> result = revert_to_backup(ibs)
        >>> print(result)
    """
    db_path = ibs.get_db_core_path()
    staging_path = ibs.get_db_staging_path()

    ibs.disconnect_sqldatabase()
    backup_dir = ibs.backupdir

    # Core database
    _revert_database_to_backup(db_path, backup_dir)
    # Staging database
    _revert_database_to_backup(staging_path, backup_dir)

    # Delete the cache; it may be stale relative to the reverted databases
    ut.delete(ibs.cachedir)


def _revert_database_to_backup(db_path, backup_dir):
    """Move db_path aside and replace it with its newest backup.

    Raises:
        AssertionError: if no matching backup exists in backup_dir
    """
    fname, ext = splitext(db_path)
    # Keep the current database around under a *_revert name
    db_path_ = '%s_revert.sqlite3' % (fname, )
    ut.move(db_path, db_path_)
    fpath, fname = split(fname)
    # Backups sort lexically by timestamped name; the last one is newest
    path_list = sorted(ut.glob(backup_dir, '%s_*%s' % (fname, ext)))
    assert len(path_list) > 0, 'no backups found in %r' % (backup_dir, )
    previous_backup = path_list[-1]
    copy_database(previous_backup, db_path)
Exemple #3
0
def _devcheck_backups():
    """Dev sanity-check: print edge/name counts for each database backup."""
    import dtool as dt
    dbdir = ut.truepath('~/work/PZ_Master1/_ibsdb')
    backup_dpath = join(dbdir, '_ibeis_backups')
    # NOTE(review): result is discarded — presumably leftover from debugging
    sorted(ut.glob(backup_dpath, '*staging_back*.sqlite3'))
    backup_fpaths = sorted(ut.glob(backup_dpath, '*database_back*.sqlite3'))
    for fpath in backup_fpaths:
        db = dt.SQLDatabaseController(fpath=fpath)
        print('fpath = %r' % (fpath, ))
        num_edges = len(db.executeone('SELECT rowid from annotmatch'))
        print('num_edges = %r' % (num_edges, ))
        num_names = len(
            db.executeone('SELECT DISTINCT name_rowid from annotations'))
        print('num_names = %r' % (num_names, ))
Exemple #4
0
    def delete_empty_directories(self):
        """
        Delete every directory under self.dpath containing no files.

        ut.ensuredir(self.dpath + '/foo')
        ut.ensuredir(self.dpath + '/foo/bar')
        ut.ensuredir(self.dpath + '/foo/bar/baz')
        self.delete_empty_directories()
        """
        import os

        # Count files that live (transitively) under each relative directory;
        # directories never reached by any file stay at zero.
        subdirs = ut.glob(self.dpath, '*',  recursive=True, fullpath=False,
                          with_files=False, with_dirs=True)
        freq = {node: 0 for node in subdirs + ['']}
        for rel_fpath in self.rel_fpath_list:
            node = rel_fpath
            while True:
                node = dirname(node)
                freq[node] += 1
                if not node:
                    break
        to_delete = [node for node, count in freq.items() if count == 0]
        # Remove the deepest directories first
        to_delete = ut.sortedby(to_delete, map(len, to_delete))[::-1]
        for rel_dpath in to_delete:
            abs_dpath = join(self.dpath, rel_dpath)
            print('Remove %s' % abs_dpath)
            os.rmdir(abs_dpath)
def _devcheck_backups():
    """Dev sanity-check: log edge/name counts for each wbia database backup."""
    from wbia import dtool as dt

    dbdir = ut.truepath('~/work/PZ_Master1/_ibsdb')
    backup_dpath = join(dbdir, '_wbia_backups')
    # NOTE(review): result is discarded — presumably leftover from debugging
    sorted(ut.glob(backup_dpath, '*staging_back*.sqlite3'))
    backup_fpaths = sorted(ut.glob(backup_dpath, '*database_back*.sqlite3'))
    for fpath in backup_fpaths:
        db_uri = 'sqlite:///{}'.format(realpath(fpath))
        db = dt.SQLDatabaseController(db_uri, 'PZ_Master1')
        logger.info('fpath = %r' % (fpath, ))
        num_edges = len(db.executeone('SELECT rowid from annotmatch'))
        logger.info('num_edges = %r' % (num_edges, ))
        num_names = len(
            db.executeone('SELECT DISTINCT name_rowid from annotations'))
        logger.info('num_names = %r' % (num_names, ))
Exemple #6
0
def revert_to_backup(ibs):
    r"""
    Replace the core database with its most recent backup.

    The current database file is kept on disk under a non-conflicting
    ``revertfrom`` name so the revert can be undone manually.

    Args:
        ibs: controller whose core database is reverted

    Raises:
        AssertionError: if no backup with a matching extension exists

    CommandLine:
        python -m ibeis.control._sql_helpers --exec-revert_to_backup

    Example:
        >>> # SCRIPT
        >>> from ibeis.control._sql_helpers import *  # NOQA
        >>> import ibeis
        >>> ibs = ibeis.opendb(defaultdb='GZ_Master1')
        >>> result = revert_to_backup(ibs)
        >>> print(result)
    """
    db_path = ibs.get_db_core_path()
    ibs.disconnect_sqldatabase()
    backup_dir = ibs.backupdir

    # Verify a backup exists BEFORE moving the live database aside;
    # otherwise an empty backup dir would leave the db moved away with
    # nothing to restore (and an opaque IndexError).
    fname, ext = splitext(db_path)
    path_list = sorted(ut.glob(backup_dir, '*%s' % ext))
    assert len(path_list) > 0, 'no backups found in %r' % (backup_dir, )

    ut.move(db_path, ut.get_nonconflicting_path(db_path + 'revertfrom.%d.orig'))
    # Carefull may invalidate the cache
    previous_backup = path_list[-1]
    ut.copy(previous_backup, db_path)
Exemple #7
0
def remove_old_backups(backup_dir, ext, max_keep):
    """Delete the oldest backups in backup_dir, keeping the newest max_keep.

    Backups are ordered lexically by filename (timestamped names sort
    chronologically).  NOTE(review): with max_keep == 0 the slice below
    deletes nothing — verify that is the intended behavior.
    """
    backups = sorted(ut.glob(backup_dir, '*%s' % ext))
    if len(backups) > max_keep:
        stale_backups = backups[:-1 * max_keep]
        for stale_fpath in stale_backups:
            print('[ensure_daily_database_backup] Deleting old backup %r' % stale_fpath)
            ut.remove_file(stale_fpath, verbose=False)
Exemple #8
0
    def delete_empty_directories(self):
        """
        Remove subdirectories of self.dpath that hold no files at all.

        ut.ensuredir(self.dpath + '/foo')
        ut.ensuredir(self.dpath + '/foo/bar')
        ut.ensuredir(self.dpath + '/foo/bar/baz')
        self.delete_empty_directories()
        """
        import os

        candidate_dirs = ut.glob(self.dpath,
                                 '*',
                                 recursive=True,
                                 fullpath=False,
                                 with_files=False,
                                 with_dirs=True)
        # Tally files underneath every ancestor directory ('' is the root)
        usage = dict.fromkeys(candidate_dirs + [''], 0)
        for path in self.rel_fpath_list:
            while True:
                path = dirname(path)
                usage[path] += 1
                if not path:
                    break
        doomed = [d for d, n in usage.items() if n == 0]
        # Deepest first so children vanish before their parents
        doomed = ut.sortedby(doomed, map(len, doomed))[::-1]
        for rel in doomed:
            target = join(self.dpath, rel)
            print('Remove %s' % target)
            os.rmdir(target)
Exemple #9
0
def copy_ibeisdb(source_dbdir, dest_dbdir):
    # TODO: rectify with rsync script
    from os.path import normpath
    import ibeis
    skip_rel_dirs = ibeis.const.EXCLUDE_COPY_REL_DIRS + ['_hsdb', '.hs_internals']
    exclude_dirs = [ut.ensure_unixslash(normpath(rel)) for rel in skip_rel_dirs]

    # Shared glob options for files and directories
    glob_kw = dict(exclude_dirs=exclude_dirs, recursive=True, fullpath=False)
    rel_tocopy = ut.glob(source_dbdir, '*', with_files=True, with_dirs=False, **glob_kw)
    rel_tocopy_dirs = ut.glob(source_dbdir, '*', with_files=False, with_dirs=True, **glob_kw)

    src_list = [join(source_dbdir, relpath) for relpath in rel_tocopy]
    dst_list = [join(dest_dbdir, relpath) for relpath in rel_tocopy]

    # ensure the destination tree exists before copying
    ut.ensuredir(dest_dbdir)
    for dpath_ in rel_tocopy_dirs:
        ut.ensuredir(join(dest_dbdir, dpath_))
    # copy files
    ut.copy(src_list, dst_list)
Exemple #10
0
 def populate(self):
     """Scan self.dpath and cache per-file name/dir/extension attributes."""
     self.rel_fpath_list = ut.glob(self.dpath, '*',  recursive=True,
                                   fullpath=False, with_dirs=False)

     def _norm_ext(rel_fpath):
         # Normalize extensions so '.jpeg' and '.jpg' compare equal
         return splitext(rel_fpath)[1].lower().replace('.jpeg', '.jpg')

     self.attrs = {
         # 'nbytes': list(map(ut.get_file_nBytes, self.fpaths())),
         'fname': [basename(p) for p in self.rel_fpath_list],
         'dname': [dirname(p) for p in self.rel_fpath_list],
         'ext': [_norm_ext(p) for p in self.rel_fpath_list],
     }
Exemple #11
0
def get_backup_fpaths(ibs):
    """Return backup sqlite files for ibs, oldest to newest.

    Backups are globbed by the core database's extension and ordered by
    their last-modified timestamps rather than lexical filename order.

    Args:
        ibs: controller with sqldb_fname and backupdir attributes

    Returns:
        list: backup file paths sorted by modification time
    """
    fname, ext = splitext(ibs.sqldb_fname)
    backups = sorted(ut.glob(ibs.backupdir, '*%s' % ext))
    modified = [ut.get_file_info(fpath)['last_modified'] for fpath in backups]
    unixtimes = [ut.util_time.exiftime_to_unixtime(tag) for tag in modified]
    # Order by actual modification time, not filename
    backups = ut.sortedby(backups, unixtimes)
    return backups
Exemple #12
0
def get_fpath_args(arglist_=None, pat='*'):
    """Expand CLI arguments into file paths; directories are globbed with pat."""
    import utool
    args = sys.argv[1:] if arglist_ is None else arglist_
    input_path_list = []
    for raw_path in args:
        real_path = utool.truepath(raw_path)
        if not os.path.isdir(real_path):
            input_path_list.append(real_path)
        else:
            input_path_list.extend(
                utool.glob(real_path, pat, recursive=False, with_dirs=False))
    return input_path_list
Exemple #13
0
 def find_empty_dirs(self):
     """ find dirs with only dirs in them """
     self.rel_dpath_list = ut.glob(self.dpath, '*',  recursive=True,
                                   fullpath=False, with_dirs=True, with_files=False)
     # Tally, for every ancestor directory, how many files live below it
     counts = dict.fromkeys(self.rel_dpath_list, 0)
     for rel_fpath in self.rel_fpath_list:
         ancestor = dirname(rel_fpath)
         while ancestor:
             counts[ancestor] += 1
             ancestor = dirname(ancestor)
     empty_dpaths = [dpath for dpath, num in counts.items() if num == 0]
     return empty_dpaths
Exemple #14
0
def search_env_paths(fname, key_list=None, verbose=None):
    r"""
    Searches your PATH to see if fname exists

    Args:
        fname (str): file name to search for (can be glob pattern)
        key_list (list): environment variables to search; defaults to
            every variable whose name contains 'PATH'
        verbose: unused

    Returns:
        dict: environment variable -> list of matching paths

    CommandLine:
        python -m utool search_env_paths --fname msvcr*.dll
        python -m utool search_env_paths --fname '*flann*'

    Example:
        >>> # DISABLE_DOCTEST
        >>> from utool.util_cplat import *  # NOQA
        >>> import utool as ut
        >>> fname = 'opencv2/highgui/libopencv_highgui.so'
        >>> fname = ut.get_argval('--fname', default='*')
        >>> print('fname = %r' % (fname,))
        >>> key_list = None # ['PATH']
        >>> found = search_env_paths(fname, key_list)
        >>> print(ut.repr4(found, nl=True, strvals=True))

    Ignore:
        OpenCV_DIR:PATH={share_opencv}
        OpenCV_CONFIG_PATH:FILEPATH={share_opencv}

    """
    import utool as ut
    if key_list is None:
        # Default to every environment variable that looks like a path list
        key_list = [key for key in os.environ if key.find('PATH') > -1]
        print('key_list = %r' % (key_list,))

    found = ut.ddict(list)
    for key in key_list:
        for dpath in os.environ[key].split(os.pathsep):
            found[key].extend(ut.glob(dpath, fname))
    return dict(found)
Exemple #15
0
def fix_youtube_names_ccl(r):
    """Rename downloaded Crash Course mp4s in the cwd to a cleaner form."""
    import utool
    cwd = os.getcwd()
    for old_fpath in utool.glob(cwd, '*.mp4'):
        dpath, fname = split(old_fpath)
        prefix = utool.regex_search(r'Crash Course .*-', fname)
        if prefix is None:
            continue
        prefix = prefix.replace('English', '').replace('-', ' - ')
        new_fpath = join(dpath, prefix + fname.replace(prefix, ''))
        print(new_fpath)
        shutil.move(old_fpath, new_fpath)
Exemple #16
0
def get_cached_vocabs():
    """List cached akmeans vocab files, largest (num_cent, num_dpts) first."""
    import parse
    # Parse some of the training data from fname
    parse_str = '{}nC={num_cent},{}_DPTS(({num_dpts},{dim}){}'
    smkdir = ut.get_app_resource_dir('smk')
    fpath_list = [join(smkdir, fname) for fname in ut.glob(smkdir, 'akmeans*')]
    parsed = [parse.parse(parse_str, fpath) for fpath in fpath_list]
    key_list = zip([int(res['num_cent']) for res in parsed],
                   [int(res['num_dpts']) for res in parsed])
    fpath_sorted = ut.sortedby(fpath_list, key_list, reverse=True)
    return fpath_sorted
Exemple #17
0
def get_fpath_args(arglist_=None, pat='*'):
    """Resolve CLI arguments to file paths, globbing directories with pat."""
    import utool
    if arglist_ is None:
        arglist_ = sys.argv[1:]

    def _expand(path):
        # Directories expand to their (non-recursive) glob matches
        if os.path.isdir(path):
            return utool.glob(path, pat, recursive=False, with_dirs=False)
        return [path]

    input_path_list = []
    for input_path in arglist_:
        input_path_list.extend(_expand(utool.truepath(input_path)))
    return input_path_list
Exemple #18
0
def get_cached_vocabs():
    """Return cached akmeans vocab paths, sorted by size metadata descending."""
    import parse
    # Training metadata is encoded directly in the cached filenames
    parse_str = '{}nC={num_cent},{}_DPTS(({num_dpts},{dim}){}'
    smkdir = ut.get_app_resource_dir('smk')
    fpath_list = [join(smkdir, fname) for fname in ut.glob(smkdir, 'akmeans*')]
    keys = []
    for fpath in fpath_list:
        res = parse.parse(parse_str, fpath)
        keys.append((int(res['num_cent']), int(res['num_dpts'])))
    return ut.sortedby(fpath_list, keys, reverse=True)
Exemple #19
0
def fix_youtube_names_ccl(r):
    """Clean up Crash Course video filenames in the current directory."""
    import utool
    cwd = os.getcwd()
    fpath_list = utool.glob(cwd, '*.mp4')
    for fpath in fpath_list:
        dpath, fname = split(fpath)
        match = utool.regex_search(r'Crash Course .*-', fname)
        if match is not None:
            cleaned = match.replace('English', '').replace('-', ' - ')
            renamed = cleaned + fname.replace(cleaned, '')
            new_fpath = join(dpath, renamed)
            print(new_fpath)
            shutil.move(fpath, new_fpath)
Exemple #20
0
def search_env_paths(fname, key_list=None, verbose=None):
    r"""
    Searches your PATH to see if fname exists

    Args:
        fname (str): file name to search for (can be glob pattern)
        key_list (list): environment variables to search; defaults to
            every variable whose name contains 'PATH'
        verbose: unused

    Returns:
        dict: environment variable -> list of matching paths

    CommandLine:
        python -m utool search_env_paths --fname msvcr*.dll

    Example:
        >>> # DISABLE_DOCTEST
        >>> from utool.util_cplat import *  # NOQA
        >>> import utool as ut
        >>> fname = 'opencv2/highgui/libopencv_highgui.so'
        >>> fname = ut.get_argval('--fname', default='*')
        >>> key_list = ['PATH']
        >>> found = search_env_paths(fname, key_list)
        >>> print(ut.dict_str(found, nl=True, strvals=True))

    Ignore:
        OpenCV_DIR:PATH={share_opencv}
        OpenCV_CONFIG_PATH:FILEPATH={share_opencv}

    """
    import utool as ut
    if key_list is None:
        # Default to every environment variable that looks like a path list
        key_list = [key for key in os.environ if key.find('PATH') > -1]

    found = ut.ddict(list)
    for key in key_list:
        for dpath in os.environ[key].split(os.pathsep):
            found[key].extend(ut.glob(dpath, fname))
    return dict(found)
Exemple #21
0
 def check_cpp_build(repo):
     """Heuristically verify a repo's C++ build produced dynamic libraries."""
     import utool as ut
     script = repo.get_script('build')
     if not script.is_fpath_valid():
         # No build script: nothing to verify
         return True, 'passed, but didnt expect anything'
     if repo.modname == 'pyflann':
         return True, 'cant detect flann cpp'
     # hack, this doesnt quite do it
     pat = '*' + ut.util_cplat.get_pylib_ext()
     dynlibs = ut.glob(repo.dpath + '/' + repo.modname, pat, recursive=True)
     return len(dynlibs) > 0, 'Could not find any dynamic libraries'
Exemple #22
0
def get_system_python_library():
    """
    FIXME; hacky way of finding python library. Not cross platform yet.

    Searches LD_LIBRARY_PATH (plus /usr/lib) for shared libraries whose
    basename contains 'lib' + the running interpreter's name.
    """
    import os
    import utool as ut
    from os.path import basename, realpath
    pyname = basename(realpath(sys.executable))
    ld_library_path = os.environ['LD_LIBRARY_PATH']
    libdirs = [d for d in ld_library_path.split(os.pathsep) if d] + ['/usr/lib']
    libfiles = ut.flatten([ut.glob(d, '*' + ut.get_lib_ext(), recursive=True)
                           for d in libdirs])
    wanted = 'lib' + pyname
    python_libs = [realpath(f) for f in libfiles if wanted in basename(f)]
    python_libs = ut.unique_ordered(python_libs)
    assert len(python_libs) == 1, str(python_libs)
    return python_libs[0]
Exemple #23
0
 def extract_zipfile_images(ibs, ingestable):
     """
     Unzip every .zip under ingestable.img_dir and list the images inside.

     Args:
         ibs: controller (unused in this body)
         ingestable: object with an img_dir attribute to scan

     Returns:
         list: image paths, or [] when no zipfiles were found
     """
     import utool as ut  # NOQA
     zipfile_list = ut.glob(ingestable.img_dir, '*.zip', recursive=True)
     if len(zipfile_list) > 0:
         print('Found zipfile_list = %r' % (zipfile_list,))
         # NOTE(review): unzipped_file_base_dir is not defined in this scope;
         # presumably a module-level path — verify before relying on this.
         ut.ensuredir(unzipped_file_base_dir)
         for zipfile in zipfile_list:
             # Mirror the zipfile's location relative to img_dir
             unziped_file_relpath = dirname(relpath(relpath(realpath(zipfile), realpath(ingestable.img_dir))))
             unzipped_file_dir = join(unzipped_file_base_dir, unziped_file_relpath)
             ut.ensuredir(unzipped_file_dir)
             ut.unzip_file(zipfile, output_dir=unzipped_file_dir, overwrite=False)
         # NOTE(review): only the FINAL loop iteration's unzipped_file_dir is
         # scanned here — confirm whether all unzipped dirs should be listed.
         gpath_list = ut.list_images(unzipped_file_dir, fullpath=True, recursive=True)
     else:
         gpath_list = []
     return gpath_list
Exemple #24
0
 def check_cpp_build(repo):
     """Check that a built repo actually produced compiled extensions."""
     import utool as ut
     build_script = repo.get_script('build')
     if build_script.is_fpath_valid():
         if repo.modname == 'pyflann':
             return True, 'cant detect flann cpp'
         # hack, this doesnt quite do it
         lib_pat = '*' + ut.util_cplat.get_pylib_ext()
         found_libs = ut.glob(repo.dpath + '/' + repo.modname, lib_pat,
                              recursive=True)
         return len(found_libs) > 0, 'Could not find any dynamic libraries'
     return True, 'passed, but didnt expect anything'
Exemple #25
0
def translate_all():
    """ Translate every python file found under the module dirs in cwd """
    dpaths = utool.ls_moduledirs('.')

    globkw = {'recursive': True, 'with_dirs': False, 'with_files': True}
    # Collect the unique absolute paths of every .py file under each dir
    unique_fpaths = set()
    for dpath in dpaths:
        for fpath in utool.glob(utool.unixpath(dpath), '*.py', **globkw):
            unique_fpaths.add(utool.unixpath(fpath))
    fpath_list = list(unique_fpaths)
    # Try to translate each
    translate(*fpath_list)
Exemple #26
0
 def find_empty_dirs(self):
     """ find dirs with only dirs in them """
     self.rel_dpath_list = ut.glob(self.dpath, '*', recursive=True,
                                   fullpath=False, with_dirs=True,
                                   with_files=False)
     # A directory is "empty" when no file counts it as an ancestor
     counts = {}
     for dpath in self.rel_dpath_list:
         counts[dpath] = 0
     for fpath in self.rel_fpath_list:
         ancestor = dirname(fpath)
         while ancestor:
             counts[ancestor] += 1
             ancestor = dirname(ancestor)
     return [d for d, c in counts.items() if c == 0]
Exemple #27
0
    def check_cpp_build(repo):
        """Return (flag, msg) indicating whether compiled libs exist for repo."""
        import utool as ut

        script = repo.get_script("build")
        if not script.is_fpath_valid():
            return True, "passed, but didnt expect anything"
        if repo.modname == "pyflann":
            return True, "cant detect flann cpp"
        # hack, this doesnt quite do it
        pat = "*" + ut.util_cplat.get_pylib_ext()
        dynlibs = ut.glob(repo.dpath + "/" + repo.modname, pat, recursive=True)
        msg = "Could not find any dynamic libraries"
        flag = len(dynlibs) > 0
        return flag, msg
def compute_forgroundness(fpath1, kpts1, species='zebra_plains'):
    """
    hack in foregroundness

    Runs a random-forest species detector over the image to produce a
    foreground-probability chip, then averages the chip values under each
    keypoint's warped patch to get one weight per keypoint.

    Args:
        fpath1 (str): image file path
        kpts1: keypoints (vtool format — assumed; TODO confirm)
        species (str): which detector model to load

    Returns:
        np.ndarray: float32 weight per keypoint in kpts1
    """
    import pyrf
    import vtool as vt
    from os.path import exists
    # hack for getting a model (not entirely ibeis independent)
    trees_path = ut.get_app_resource_dir('ibeis', 'detectmodels', 'rf',
                                         species)
    tree_fpath_list = ut.glob(trees_path, '*.txt')
    detector = pyrf.Random_Forest_Detector()
    # TODO; might need to downsample
    forest = detector.forest(tree_fpath_list, verbose=False)
    gpath_list = [fpath1]
    # The detector writes probability chips next to the input image
    output_gpath_list = [
        gpath + '.' + species + '.probchip.png' for gpath in gpath_list
    ]
    detectkw = {
        'scale_list': [1.15, 1.0, 0.85, 0.7, 0.55, 0.4, 0.25, 0.1],
        'output_gpath_list': output_gpath_list,
        'mode': 1,  # mode one outputs probimage
    }
    results_iter = detector.detect(forest, gpath_list, **detectkw)
    # Force evaluation of the detector generator (presumably required
    # before the probchip files exist on disk — TODO confirm)
    results_list = list(results_iter)  # NOQA
    probchip_list = [
        vt.imread(gpath, grayscale=True) if exists(gpath) else None
        for gpath in output_gpath_list
    ]
    #vtpatch.get_warped_patches()
    fgweights_list = []
    kpts_list = [kpts1]
    for probchip, kpts in zip(probchip_list, kpts_list):
        # Average the probability patch under each keypoint (0-255 -> 0-1)
        patch_list = [
            vt.get_warped_patch(probchip, kp)[0].astype(np.float32) / 255.0
            for kp in kpts
        ]
        weight_list = [
            vt.gaussian_average_patch(patch) for patch in patch_list
        ]
        #weight_list = [patch.sum() / (patch.size) for patch in patch_list]
        weights = np.array(weight_list, dtype=np.float32)
        fgweights_list.append(weights)
    # Only one image was processed, so take its weights
    fgweights = fgweights_list[0]
    detector.free_forest(forest)
    return fgweights
Exemple #29
0
 def populate(self):
     """Glob self.dpath and record fname/dname/ext attributes per file."""
     self.rel_fpath_list = ut.glob(self.dpath,
                                   '*',
                                   recursive=True,
                                   fullpath=False,
                                   with_dirs=False)
     # Normalize extensions so '.jpeg' and '.jpg' compare equal
     exts = [splitext(p)[1].lower().replace('.jpeg', '.jpg')
             for p in self.rel_fpath_list]
     self.attrs = {
         # 'nbytes': list(map(ut.get_file_nBytes, self.fpaths())),
         'fname': list(map(basename, self.rel_fpath_list)),
         'dname': list(map(dirname, self.rel_fpath_list)),
         'ext': exts,
     }
    def ensure_results(self, expt_name=None, nocompute=None):
        """
        Subclasses must obey the measure_<expt_name>, draw_<expt_name> contract

        With expt_name=None, loads every cached *.pkl result from self.dpath
        into self.expt_results (returning None).  With an expt_name, loads
        that single result, first invoking measure_<expt_name> when the
        pickle is missing and nocompute is falsy.

        Args:
            expt_name (str): experiment to load, or None for all cached ones
            nocompute (bool): forbid re-measuring; defaults to --nocompute flag

        Raises:
            Exception: when results are missing and nocompute is set
        """
        if nocompute is None:
            nocompute = ut.get_argflag('--nocompute')

        if expt_name is None and exists(self.dpath):
            # Load all cached results; keys come from the pickle filenames
            fpaths = ut.glob(str(self.dpath), '*.pkl')
            expt_names = [splitext(basename(fpath))[0] for fpath in fpaths]
            for fpath, expt_name in zip(fpaths, expt_names):
                self.expt_results[expt_name] = ut.load_data(fpath)
        else:
            # expt_name = splitext(basename(fpath))[0]
            fpath = join(str(self.dpath), expt_name + '.pkl')
            # fpath = ut.truepath(fpath)
            if not exists(fpath):
                ut.cprint(
                    'Experiment results {} do not exist'.format(expt_name),
                    'red')
                ut.cprint('First re-setup to check if it is a path issue',
                          'red')
                if nocompute:
                    raise Exception(
                        str(expt_name) + ' does not exist for ' +
                        str(self.dbname))

                if self.ibs is None:
                    self._precollect()
                ut.cprint('Checking new fpath', 'yellow')
                # NOTE(review): this recomputes the same path unless
                # _precollect changed self.dpath — verify that side effect.
                fpath = join(str(self.dpath), expt_name + '.pkl')
                logger.info('fpath = %r' % (fpath, ))
                if not exists(fpath):
                    ut.cprint('Results still missing need to re-measure',
                              'red')
                    # assert False
                    # self._setup()
                    # Dispatch to the subclass's measure_<expt_name> method
                    getattr(self, 'measure_' + expt_name)()
                else:
                    ut.cprint('Re-setup fixed it', 'green')
            else:
                logger.info('Experiment results {} exist'.format(expt_name))
            self.expt_results[expt_name] = ut.load_data(fpath)
            return self.expt_results[expt_name]
Exemple #31
0
def build_uninstall_script():
    """Print uninstall commands for the Remove*.exe wininst uninstallers
    found in C:/Python27."""
    #import utool as ut
    from os.path import join
    #import parse
    pydir = 'C:/Python27'
    cmd_list = []
    for exefname in ut.glob(pydir, 'Remove*.exe'):
        parse_result = parse.parse('{pypath}Remove{pkgname}.exe', exefname)
        pkgname = parse_result['pkgname']
        # Each uninstaller needs its matching wininst install log
        logfpath = join(pydir, pkgname + '-wininst.log')
        exefpath = join(pydir, exefname)
        cmd_list.append('"' + exefpath + '" -u "' + logfpath + '"')

    print('\n'.join(cmd_list))
Exemple #32
0
def build_uninstall_script():
    """Emit a batch of wininst uninstall commands for C:/Python27 packages."""
    #import utool as ut
    from os.path import join
    #import parse
    pydir = 'C:/Python27'
    uninstall_list = ut.glob(pydir, 'Remove*.exe')
    cmd_list = []
    for exefname in uninstall_list:
        parsed = parse.parse('{pypath}Remove{pkgname}.exe', exefname)
        logfname = parsed['pkgname'] + '-wininst.log'
        logfpath = join(pydir, logfname)
        exefpath = join(pydir, exefname)
        # Quote both paths; -u points the uninstaller at its install log
        cmd_list.append('"' + exefpath + '" -u "' + logfpath + '"')

    script_text = '\n'.join(cmd_list)
    print(script_text)
Exemple #33
0
def get_system_python_library():
    """
    FIXME; hacky way of finding python library. Not cross platform yet.

    Scans LD_LIBRARY_PATH entries (then /usr/lib) for a unique shared
    library matching 'lib' + the running interpreter's basename.
    """
    import os
    import utool as ut
    from os.path import basename, realpath
    pyname = basename(realpath(sys.executable))
    ld_library_path = os.environ['LD_LIBRARY_PATH']
    search_dirs = [p for p in ld_library_path.split(os.pathsep) if p]
    search_dirs.append('/usr/lib')
    lib_pat = '*' + ut.get_lib_ext()
    libfiles = ut.flatten(
        [ut.glob(d, lib_pat, recursive=True) for d in search_dirs])
    python_libs = [
        realpath(f) for f in libfiles if 'lib' + pyname in basename(f)
    ]
    python_libs = ut.unique_ordered(python_libs)
    assert len(python_libs) == 1, str(python_libs)
    return python_libs[0]
Exemple #34
0
def glob_projects(pat, user_profile=None, recursive=True):
    """Glob pat across every project dpath declared by the user profile.

    Directories in user_profile.project_exclude_dirs are skipped.

    Args:
        pat (str): glob pattern
        user_profile: profile object; resolved via ensure_user_profile
        recursive (bool): recurse into project directories

    Returns:
        list: matching paths across all project directories
    """
    import utool as ut  # NOQA
    user_profile = ensure_user_profile(user_profile)
    glob_results = []
    for dpath in user_profile.project_dpaths:
        glob_results.extend(
            ut.glob(dpath, pat, recursive=recursive,
                    exclude_dirs=user_profile.project_exclude_dirs))
    return glob_results
Exemple #35
0
def translate_all():
    """ Translate all python files beneath the module dirs in cwd """
    dpaths = utool.ls_moduledirs('.')

    globkw = {
        'recursive': True,
        'with_dirs': False,
        'with_files': True
    }
    # Deduplicate python files across all module directories
    fpath_list = list({
        utool.unixpath(fpath)
        for dpath in dpaths
        for fpath in utool.glob(utool.unixpath(dpath), '*.py', **globkw)
    })
    # Try to translate each
    translate(*fpath_list)
Exemple #36
0
def find_packages(recursive=True, maxdepth=None):
    """
    Finds all directories with an __init__.py file in them
    """
    import utool
    if utool.VERBOSE:
        print('[util_setup] find_packages(recursive=%r, maxdepth=%r)' % (recursive, maxdepth))
    from os.path import relpath
    cwd = os.getcwd()
    init_files = utool.glob(cwd, '__init__.py', recursive=recursive, maxdepth=maxdepth)
    package_relpaths = [relpath(dirname(fpath), cwd) for fpath in init_files]

    packages = []
    for rel_dpath in package_relpaths:
        top_level = utool.dirsplit(rel_dpath)[0]
        # Only accept dirs whose top-level ancestor is itself a package
        if exists(join(top_level, '__init__.py')):
            packages.append(rel_dpath.replace('/', '.').replace('\\', '.'))
    return packages
def testdata_fpaths():
    """Collect thesis .tex files, overridable from the CLI via --fpaths."""
    root = '.'
    tex_patterns = [
        'chapter*.tex',
        'sec-*.tex',
        'figdef*.tex',
        'def.tex',
        'pairwise-classifier.tex',
        'graph-id.tex',
        'appendix.tex',
        'main.tex',
        'graph_id.tex',
    ]
    default_fpaths = sorted(
        ut.glob(root, tex_patterns, recursive=True, exclude_dirs=['guts']))
    # Command line takes precedence over the globbed defaults
    return ut.get_argval('--fpaths', type_=list, default=default_fpaths)
Exemple #38
0
def find_ext_modules(disable_warnings=True):
    """
    Discover every ``*.pyx`` file under the cwd and wrap each one in a
    setuptools ``Extension`` so it can be compiled.

    Args:
        disable_warnings (bool): if True, pass ``-Wno-*`` flags to the compiler

    Returns:
        list: ``Extension`` objects; empty unless a build command
            (``bext``, ``build``, or ``build_ext``) is in ``sys.argv``
    """
    from setuptools import Extension
    import utool
    from os.path import relpath
    cwd = os.getcwd()

    # only find extension modules when a build command was requested
    build_requested = any(cmd in sys.argv for cmd in ('bext', 'build', 'build_ext'))
    if not build_requested:
        return []

    pyx_fpaths = utool.glob(cwd, '*.pyx', recursive=True)

    extra_compile_args = (['-Wno-format', '-Wno-unused-function']
                          if disable_warnings else [])

    ext_modules = []
    for abspath_ in pyx_fpaths:
        rel_fpath = relpath(abspath_, cwd)
        # dotted module name derived from the relative path
        modname = splitext(rel_fpath.replace('\\', '.').replace('/', '.'))[0]
        print('[find_ext] Found Module:')
        print('   * pyx_modname = %r' % (modname,))
        print('   * pyx_relpath = %r' % (rel_fpath,))
        ext_modules.append(Extension(modname, [rel_fpath],
                                     include_dirs=[get_numpy_include_dir()],
                                     extra_compile_args=extra_compile_args))
    return ext_modules
Exemple #39
0
    def tozip():
        """
        Archive the chapter-4 LaTeX sources plus every figure image they
        reference into ``chap4.zip``.
        """
        fig_regex = ut.named_field('fpath', 'figure.*?[jp][pn]g') + '}'
        src_patterns = [
            'chapter4-application.tex', 'figdef4*', 'main.tex', 'def.tex',
            'Crall*', 'thesis.cls', 'header*', 'colordef.tex', '*.bib'
        ]
        tex_fpaths = sorted(
            ut.glob('.', src_patterns, recursive=True, exclude_dirs=['guts']))

        # grep the sources for figure references
        grep_fpaths, grep_lines, grep_lxs = ut.grep(
            fig_regex, fpath_list=tex_fpaths, verbose=True)
        fig_fpaths = []
        for line in ut.flatten(grep_lines):
            if line.startswith('%'):
                continue  # skip commented-out LaTeX lines
            for match in re.finditer(fig_regex, line):
                fig_fpath = match.groupdict()['fpath']
                # hand-picked exclusions (presumably generated diagrams)
                if any(word in fig_fpath for word in ('junc', 'markov', 'bayes')):
                    continue
                fig_fpaths.append(fig_fpath)

        ut.archive_files('chap4.zip', fig_fpaths + tex_fpaths)
Exemple #40
0
def find_ext_modules(disable_warnings=True):
    """
    Discover ``*.pyx`` files under the cwd and wrap each in a setuptools
    ``Extension``.

    Args:
        disable_warnings (bool): if True, pass ``-Wno-*`` flags to the compiler

    Returns:
        list: ``Extension`` objects; empty unless a build command is in
            ``sys.argv``
    """
    from setuptools import Extension
    import utool
    from os.path import relpath
    cwd = os.getcwd()

    # Flags for the build-related commands that may appear on the CLI
    # CYTH      = 'cyth' in sys.argv
    BEXT      = 'bext' in sys.argv
    BUILD     = 'build' in sys.argv
    BUILD_EXT = 'build_ext' in sys.argv

    # if any([BEXT, CYTH]):
    #     translate_cyth()  # translate cyth before finding ext modules

    if not any([BEXT, BUILD, BUILD_EXT]):
        # dont find modules if they are not being built
        return []

    #pyx_list = utool.glob(cwd, '*_cython.pyx', recursive=True)
    pyx_list = utool.glob(cwd, '*.pyx', recursive=True)

    if disable_warnings:
        extra_compile_args = ['-Wno-format', '-Wno-unused-function']
    else:
        extra_compile_args = []

    ext_modules = []
    for pyx_abspath in pyx_list:
        pyx_relpath = relpath(pyx_abspath, cwd)
        # dotted module name derived from the relative path
        pyx_modname, _ = splitext(pyx_relpath.replace('\\', '.').replace('/', '.'))
        print('[find_ext] Found Module:')
        print('   * pyx_modname = %r' % (pyx_modname,))
        print('   * pyx_relpath = %r' % (pyx_relpath,))
        extmod = Extension(pyx_modname, [pyx_relpath],
                           include_dirs=[get_numpy_include_dir()],
                           extra_compile_args=extra_compile_args)
        ext_modules.append(extmod)
    return ext_modules
Exemple #41
0
def glob_projects(pat, user_profile=None):
    """
    Glob for ``pat`` in every project directory registered in the user
    profile.

    Args:
        pat: filename pattern forwarded to ``ut.glob``
        user_profile: optional profile object; resolved through
            ``ensure_user_profile`` when None

    Returns:
        list: matching paths from all project dpaths, flattened
    """
    import utool as ut  # NOQA
    user_profile = ensure_user_profile(user_profile)
    hits = []
    for dpath in user_profile.project_dpaths:
        hits.extend(
            ut.glob(dpath,
                    pat,
                    recursive=True,
                    exclude_dirs=user_profile.project_exclude_dirs))
    return hits
def compute_forgroundness(fpath1, kpts1, species="zebra_plains"):
    """
    hack in foregroundness

    Runs a random-forest species detector over the image at ``fpath1`` to
    produce a probability chip on disk, then samples that chip at each
    keypoint in ``kpts1`` with a gaussian-weighted patch average.

    Args:
        fpath1 (str): image file path
        kpts1: keypoints; assumed to be a vtool kpts array — TODO confirm
        species (str): name of the detect-model resource directory

    Returns:
        np.ndarray: float32 foregroundness weight per keypoint
    """
    import pyrf
    import vtool as vt
    from os.path import exists

    # hack for getting a model (not entirely ibeis independent)
    trees_path = ut.get_app_resource_dir("ibeis", "detectmodels", "rf", species)
    tree_fpath_list = ut.glob(trees_path, "*.txt")
    detector = pyrf.Random_Forest_Detector()
    # TODO; might need to downsample
    forest = detector.forest(tree_fpath_list, verbose=False)
    gpath_list = [fpath1]
    # the detector writes one probchip image per input image
    output_gpath_list = [gpath + "." + species + ".probchip.png" for gpath in gpath_list]
    detectkw = {
        "scale_list": [1.15, 1.0, 0.85, 0.7, 0.55, 0.4, 0.25, 0.1],
        "output_gpath_list": output_gpath_list,
        "mode": 1,  # mode one outputs probimage
    }
    results_iter = detector.detect(forest, gpath_list, **detectkw)
    # consume the iterator to force the probchips to be written
    results_list = list(results_iter)  # NOQA
    probchip_list = [vt.imread(gpath, grayscale=True) if exists(gpath) else None for gpath in output_gpath_list]
    # vtpatch.get_warped_patches()
    fgweights_list = []
    kpts_list = [kpts1]
    for probchip, kpts in zip(probchip_list, kpts_list):
        # normalize 8-bit probchip patches to [0, 1] before averaging
        patch_list = [vt.get_warped_patch(probchip, kp)[0].astype(np.float32) / 255.0 for kp in kpts]
        weight_list = [vt.gaussian_average_patch(patch) for patch in patch_list]
        # weight_list = [patch.sum() / (patch.size) for patch in patch_list]
        weights = np.array(weight_list, dtype=np.float32)
        fgweights_list.append(weights)
    fgweights = fgweights_list[0]
    detector.free_forest(forest)
    return fgweights
Exemple #43
0
def find_packages(recursive=True, maxdepth=None):
    """
    Finds all directories with an __init__.py file in them

    Args:
        recursive (bool): search subdirectories for ``__init__.py`` files
        maxdepth (int or None): optional recursion depth limit

    Returns:
        list: dotted package names relative to the cwd
    """
    import utool
    if utool.VERBOSE:
        print('[util_setup] find_packages(recursive=%r, maxdepth=%r)' %
              (recursive, maxdepth))
    from os.path import relpath
    cwd = os.getcwd()
    init_files = utool.glob(cwd,
                            '__init__.py',
                            recursive=recursive,
                            maxdepth=maxdepth)
    package_paths = list(map(dirname, init_files))
    package_relpaths = [relpath(path, cwd) for path in package_paths]

    packages = []
    for path in package_relpaths:
        # only accept paths whose top-level directory is itself a package
        base = utool.dirsplit(path)[0]
        if exists(join(base, '__init__.py')):
            package = path.replace('/', '.').replace('\\', '.')
            packages.append(package)
    return packages
def load_oxford_2007():
    """
    Loads data from
    http://www.robots.ox.ac.uk:5000/~vgg/publications/2007/Philbin07/philbin07.pdf

    Reads the precomputed hesaff-sift visual-word assignments and the raw
    binary SIFT descriptors for the Oxford buildings dataset, converts the
    keypoints to invV format, and caches the result in ``data_2007.pkl``.

    Returns:
        dict: with keys ``offset_list``, ``all_kpts``, ``all_vecs``,
            ``idx_to_wx``, ``data_uri_order``, ``query_uri_order``.

    >>> from wbia.algo.smk.script_smk import *  # NOQA
    """
    from os.path import join, basename, splitext
    import pandas as pd
    import vtool as vt

    dbdir = ut.truepath('/raid/work/Oxford/')
    data_fpath0 = join(dbdir, 'data_2007.pkl')

    # Fast path: reuse the cached pickle when it exists
    if ut.checkpath(data_fpath0):
        data = ut.load_data(data_fpath0)
        return data
    else:
        word_dpath = join(dbdir, 'word_oxc1_hesaff_sift_16M_1M')
        _word_fpath_list = ut.ls(word_dpath)
        imgid_to_word_fpath = {
            splitext(basename(word_fpath))[0]: word_fpath
            for word_fpath in _word_fpath_list
        }
        # The canonical image ordering comes from the dataset README
        readme_fpath = join(dbdir, 'README2.txt')
        imgid_order = ut.readfrom(readme_fpath).split('\n')[20:-1]
        data_uri_order = [x.replace('oxc1_', '') for x in imgid_order]

        # Parse one (word_id, x, y, e11, e12, e22) table per image;
        # the first two lines of each word file are headers.
        imgid_to_df = {}
        for imgid in ut.ProgIter(imgid_order, label='reading kpts'):
            word_fpath = imgid_to_word_fpath[imgid]
            row_gen = (map(float,
                           line.strip('\n').split(' '))
                       for line in ut.read_lines_from(word_fpath)[2:])
            rows = [(int(word_id), x, y, e11, e12, e22)
                    for (word_id, x, y, e11, e12, e22) in row_gen]
            df = pd.DataFrame(
                rows, columns=['word_id', 'x', 'y', 'e11', 'e12', 'e22'])
            imgid_to_df[imgid] = df

        df_list = ut.take(imgid_to_df, imgid_order)

        nfeat_list = [len(df_) for df_ in df_list]
        offset_list = [0] + ut.cumsum(nfeat_list)
        shape = (offset_list[-1], 128)
        # shape = (16334970, 128)
        sift_fpath = join(dbdir, 'OxfordSIFTDescriptors',
                          'feat_oxc1_hesaff_sift.bin')
        # One uint8 per element, so the byte count equals prod(shape).
        # Use a context manager instead of manual try/finally, and
        # np.frombuffer instead of the deprecated np.fromstring.
        with open(sift_fpath, 'rb') as file_:
            with ut.Timer('Reading SIFT binary file'):
                nbytes = int(np.prod(shape))
                all_vecs = np.frombuffer(file_.read(nbytes), dtype=np.uint8)
        # NOTE: frombuffer yields a read-only view; the array is only
        # reshaped, stored, and pickled below, so no copy is needed.
        all_vecs = all_vecs.reshape(shape)

        kpts_list = [
            df_.loc[:, ('x', 'y', 'e11', 'e12', 'e22')].values
            for df_ in df_list
        ]
        wordid_list = [df_.loc[:, 'word_id'].values for df_ in df_list]
        kpts_Z = np.vstack(kpts_list)
        idx_to_wx = np.hstack(wordid_list)

        # assert len(np.unique(idx_to_wx)) == 1E6

        # Required standard query order
        query_files = sorted(
            ut.glob(dbdir + '/oxford_groundtruth', '*_query.txt'))
        query_uri_order = []
        for qpath in query_files:
            text = ut.readfrom(qpath, verbose=0)
            query_uri = text.split(' ')[0].replace('oxc1_', '')
            query_uri_order.append(query_uri)

        logger.info('converting to invV')
        all_kpts = vt.convert_kptsZ_to_kpts(kpts_Z)

        data = {
            'offset_list': offset_list,
            'all_kpts': all_kpts,
            'all_vecs': all_vecs,
            'idx_to_wx': idx_to_wx,
            'data_uri_order': data_uri_order,
            'query_uri_order': query_uri_order,
        }
        ut.save_data(data_fpath0, data)
    return data
Exemple #45
0
def find_unregistered_methods():
    r"""
    Greps the ``ibeis.control`` ``manual_*_funcs`` modules for function
    definitions that are not immediately preceded by a decorator line, and
    for bare ``name = name`` function aliases, and prints the matches.

    CommandLine:
        python -m ibeis.control.controller_inject --test-find_unregistered_methods --enableall

    Example:
        >>> # ENABLE_DOCTEST
        >>> from ibeis.control.controller_inject import *  # NOQA
        >>> result = find_unregistered_methods()
        >>> print(result)
    """
    from os.path import dirname
    import utool as ut
    import ibeis.control
    import re
    #regex = r'[^@]*\ndef'
    modfpath = dirname(ibeis.control.__file__)
    fpath_list = ut.glob(modfpath, 'manual_*_funcs.py')
    #fpath_list += ut.glob(modfpath, '_autogen_*_funcs.py')

    def multiline_grepfile(regex, fpath):
        # Return (matchtexts, linenos) for every multiline regex hit in fpath.
        # NOTE: linenos are 0-based (count of newlines before the match start).
        found_matchtexts = []
        found_linenos   = []
        text = ut.read_from(fpath, verbose=False)
        for match in  re.finditer(regex, text, flags=re.MULTILINE):
            lineno = text[:match.start()].count('\n')
            matchtext = ut.get_match_text(match)
            found_linenos.append(lineno)
            found_matchtexts.append(matchtext)
        return found_matchtexts, found_linenos

    def multiline_grep(regex, fpath_list):
        # Run multiline_grepfile on each file; keep only files with hits.
        found_fpath_list      = []
        found_matchtexts_list = []
        found_linenos_list    = []
        for fpath in fpath_list:
            found_matchtexts, found_linenos = multiline_grepfile(regex, fpath)
            # append anything found in this file
            if len(found_matchtexts) > 0:
                found_fpath_list.append(fpath)
                found_matchtexts_list.append(found_matchtexts)
                found_linenos_list.append(found_linenos)
        return found_fpath_list, found_matchtexts_list, found_linenos_list

    def print_mutliline_matches(tup):
        # Pretty-print the grep results grouped by file.
        found_fpath_list, found_matchtexts_list, found_linenos_list = tup
        for fpath, found_matchtexts, found_linenos in zip(found_fpath_list,
                                                          found_matchtexts_list,
                                                          found_linenos_list):
            print('+======')
            print(fpath)
            for matchtext, lineno in zip(found_matchtexts, found_linenos):
                print('    ' + '+----')
                print('    ' + str(lineno))
                print('    ' + str(matchtext))
                print('    ' + 'L____')

    #print(match)
    print('\n\n GREPING FOR UNDECORATED FUNCTIONS')
    # a def whose preceding line contains no '@' (i.e. no decorator)
    regex = '^[^@\n]*\ndef\\s.*$'
    tup = multiline_grep(regex, fpath_list)
    print_mutliline_matches(tup)

    print('\n\n GREPING FOR UNDECORATED FUNCTION ALIASES')
    # lines of the form ``name = other_name``
    regex = '^' + ut.REGEX_VARNAME + ' = ' + ut.REGEX_VARNAME
    tup = multiline_grep(regex, fpath_list)
    print_mutliline_matches(tup)
Exemple #46
0
def turtles():
    """
    One-off maintenance driver for the RotanTurtles raw data directories:
    removes cached/derived files, halts for manual inspection if any two
    source directories share relative paths, then merges all sources into
    the first one.

    Side effects: deletes files, removes empty directories, prints a merge
    bash script, and performs the merge.
    """
    # BUGFIX: keyword was misspelled ``recusrive``; we want only the
    # top-level source directories, not their recursive contents.
    source_dpaths = sorted(ut.glob('/raid/raw/RotanTurtles/', '*',
                                   recursive=False, with_dirs=True,
                                   with_files=False))
    sources = [SourceDir(dpath) for dpath in source_dpaths]

    for self in ut.ProgIter(sources, label='populate'):
        self.populate()

    import fnmatch
    # Remove cached/derived files (numpy caches, flann indexes, hotspotter chips)
    del_ext = set(['.npy', '.flann', '.npz'])
    for self in ut.ProgIter(sources, label='populate'):
        flags = [ext in del_ext for ext in self.attrs['ext']]
        to_delete = ut.compress(list(self.fpaths()), flags)
        ut.remove_file_list(to_delete)
        flags = [fnmatch.fnmatch(fpath, '*/_hsdb/computed/chips/*.png')
                 for fpath in self.rel_fpath_list]
        to_delete = ut.compress(list(self.fpaths()), flags)
        ut.remove_file_list(to_delete)
        self.populate()

    for self in ut.ProgIter(sources, label='del empty'):
        self.populate()
        self.delete_empty_directories()

    print(ut.byte_str2(sum([self.nbytes() for self in sources])))

    # Halt for manual inspection if any pair of sources overlaps
    for i, j in ut.combinations(range(len(sources)), 2):
        s1 = sources[i]
        s2 = sources[j]
        isect = set(s1.rel_fpath_list).intersection(s2.rel_fpath_list)
        if isect:
            s1.isect_info(s2)
            print((i, j))
            print(s1.dpath)
            print(s2.dpath)
            # bind for post-mortem debugging before the deliberate halt
            self = s1
            other = s2
            assert False, 'conflicting sources need manual resolution'

    for self in ut.ProgIter(sources, label='populate'):
        self.populate()

    dest = sources[0]
    others = sources[1:]
    # Merge others into dest
    bash_script = '\n'.join([o.make_merge_bash_script(dest) for o in others])
    print(bash_script)

    for other in others:
        other.merge_into(dest)
def get_data_list():
    r"""
    Collect the data files, shared libraries, and icon that PyInstaller
    needs to bundle IBEIS.

    NOTE(review): relies on module-level globals defined elsewhere in this
    file (``root_dir``, ``ibsbuild``, ``LIB_EXT``, ``PLATFORM``, ``APPLE``,
    ``LINUX``, ``WIN32``) — confirm against the full module.

    Returns:
        tuple: ``(DATATUP_LIST, BINARYTUP_LIST, iconfile)`` where each entry
            of ``DATATUP_LIST`` is a ``(dst, src)`` pair and each entry of
            ``BINARYTUP_LIST`` is ``(name, src, 'BINARY')``.

    CommandLine:
        python ~/code/ibeis/_installers/ibeis_pyinstaller_data_helper.py --test-get_data_list

    Example:
        >>> # ENABLE_DOCTEST
        >>> from ibeis_pyinstaller_data_helper import *  # NOQA
        >>> result = get_data_list()
        >>> DATATUP_LIST, BINARYTUP_LIST, iconfile = result
        >>> print('DATATUP_LIST = ' + ut.list_str(DATATUP_LIST))
        >>> print('BINARYTUP_LIST = ' + ut.list_str(BINARYTUP_LIST))
        >>> print(len(DATATUP_LIST))
        >>> print(len(BINARYTUP_LIST))
        >>> print(iconfile)

    """
    # Build data before running analysis for quick debugging
    DATATUP_LIST = []
    BINARYTUP_LIST = []

    #import pyhesaff
    #pyhesaff.HESAFF_CLIB.__LIB_FPATH__
    #import pyrf
    #pyrf.RF_CLIB.__LIB_FPATH__
    # Hesaff
    libhesaff_fname = 'libhesaff' + LIB_EXT
    libhesaff_src = realpath(join(root_dir, '..', 'hesaff', 'pyhesaff', libhesaff_fname))
    libhesaff_dst = join(ibsbuild, 'pyhesaff', 'lib', libhesaff_fname)
    DATATUP_LIST.append((libhesaff_dst, libhesaff_src))

    # PyRF
    libpyrf_fname = 'libpyrf' + LIB_EXT
    libpyrf_src = realpath(join(root_dir, '..', 'pyrf', 'pyrf', libpyrf_fname))
    libpyrf_dst = join(ibsbuild, 'pyrf', 'lib', libpyrf_fname)
    DATATUP_LIST.append((libpyrf_dst, libpyrf_src))

    # FLANN
    libflann_fname = 'libflann' + LIB_EXT
    #try:
    #    #import pyflann
    #    #pyflann.__file__
    #    #join(dirname(dirname(pyflann.__file__)), 'build')
    #except ImportError as ex:
    #    print('PYFLANN IS NOT IMPORTABLE')
    #    raise
    #if WIN32 or LINUX:
    # FLANN
    #libflann_src = join_SITE_PACKAGES('pyflann', 'lib', libflann_fname)
    #libflann_dst = join(ibsbuild, libflann_fname)
    #elif APPLE:
    #    # libflann_src = '/pyflann/lib/libflann.dylib'
    #    # libflann_dst = join(ibsbuild, libflann_fname)
    #    libflann_src = join_SITE_PACKAGES('pyflann', 'lib', libflann_fname)
    #    libflann_dst = join(ibsbuild, libflann_fname)
    # This path is when pyflann was built using setup.py develop
    libflann_src = realpath(join(root_dir, '..', 'flann', 'build', 'lib', libflann_fname))
    libflann_dst = join(ibsbuild, 'pyflann', 'lib', libflann_fname)
    DATATUP_LIST.append((libflann_dst, libflann_src))

    # VTool
    vtool_libs = ['libsver']
    for libname in vtool_libs:
        lib_fname = libname + LIB_EXT
        vtlib_src = realpath(join(root_dir, '..', 'vtool', 'vtool', lib_fname))
        vtlib_dst = join(ibsbuild, 'vtool', lib_fname)
        DATATUP_LIST.append((vtlib_dst, vtlib_src))

    # Candidate locations for system shared libraries on linux
    linux_lib_dpaths = [
        '/usr/lib/x86_64-linux-gnu',
        '/usr/lib',
        '/usr/local/lib'
    ]

    # OpenMP
    if APPLE:
        # BSDDB, Fix for the modules that PyInstaller needs and (for some reason)
        # are not being added by PyInstaller
        libbsddb_src = '/opt/local/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/lib-dynload/_bsddb.so'
        libbsddb_dst = join(ibsbuild, '_bsddb.so')
        DATATUP_LIST.append((libbsddb_dst, libbsddb_src))
        #libgomp_src = '/opt/local/lib/libgomp.dylib'
        libgomp_src = '/opt/local/lib/gcc48/libgomp.dylib'
        BINARYTUP_LIST.append(('libgomp.1.dylib', libgomp_src, 'BINARY'))

        # very hack
        libiomp_src = '/Users/bluemellophone/code/libomp_oss/exports/mac_32e/lib.thin/libiomp5.dylib'
        BINARYTUP_LIST.append(('libiomp5.dylib', libiomp_src, 'BINARY'))

    if LINUX:
        libgomp_src = ut.search_in_dirs('libgomp.so.1', linux_lib_dpaths)
        ut.assertpath(libgomp_src)
        BINARYTUP_LIST.append(('libgomp.so.1', libgomp_src, 'BINARY'))

    # MinGW
    if WIN32:
        mingw_root = r'C:\MinGW\bin'
        mingw_dlls = ['libgcc_s_dw2-1.dll', 'libstdc++-6.dll', 'libgomp-1.dll', 'pthreadGC2.dll']
        for lib_fname in mingw_dlls:
            lib_src = join(mingw_root, lib_fname)
            lib_dst = join(ibsbuild, lib_fname)
            DATATUP_LIST.append((lib_dst, lib_src))

    # We need to add these 4 opencv libraries because pyinstaller does not find them.
    #OPENCV_EXT = {'win32': '248.dll',
    #              'darwin': '.2.4.dylib',
    #              'linux2': '.so.2.4'}[PLATFORM]

    target_cv_version = '3.0.0'

    # Platform-specific opencv shared-library suffix
    OPENCV_EXT = {'win32': target_cv_version.replace('.', '') + '.dll',
                  'darwin': '.' + target_cv_version + '.dylib',
                  'linux2': '.so.' + target_cv_version}[PLATFORM]

    missing_cv_name_list = [
        'libopencv_videostab',
        'libopencv_superres',
        'libopencv_stitching',
        #'libopencv_gpu',
        'libopencv_core',
        'libopencv_highgui',
        'libopencv_imgproc',
    ]
    # Hack to find the appropriate opencv libs
    for name in missing_cv_name_list:
        fname = name + OPENCV_EXT
        src = ''
        dst = ''
        if APPLE:
            src = join('/opt/local/lib', fname)
        elif LINUX:
            #src = join('/usr/lib', fname)
            src, tried = ut.search_in_dirs(fname, linux_lib_dpaths, strict=True, return_tried=True)
        elif WIN32:
            if ut.get_computer_name() == 'Ooo':
                src = join(r'C:/Program Files (x86)/OpenCV/x86/mingw/bin', fname)
            else:
                src = join(root_dir, '../opencv/build/bin', fname)
        dst = join(ibsbuild, fname)
        # ut.assertpath(src)
        DATATUP_LIST.append((dst, src))

    ##################################
    # QT Gui dependencies
    ##################################
    if APPLE:
        walk_path = '/opt/local/Library/Frameworks/QtGui.framework/Versions/4/Resources/qt_menu.nib'
        for root, dirs, files in os.walk(walk_path):
            for lib_fname in files:
                toc_src = join(walk_path, lib_fname)
                toc_dst = join('qt_menu.nib', lib_fname)
                DATATUP_LIST.append((toc_dst, toc_src))

    ##################################
    # Documentation, Icons, and Web Assets
    ##################################
    # Documentation
    #userguide_dst = join('.', '_docs', 'IBEISUserGuide.pdf')
    #userguide_src = join(root_dir, '_docs', 'IBEISUserGuide.pdf')
    #DATATUP_LIST.append((userguide_dst, userguide_src))

    # Icon File
    ICON_EXT = {'darwin': '.icns',
                'win32':  '.ico',
                'linux2': '.ico'}[PLATFORM]
    iconfile = join('_installers', 'ibsicon' + ICON_EXT)
    icon_src = join(root_dir, iconfile)
    icon_dst = join(ibsbuild, iconfile)
    DATATUP_LIST.append((icon_dst, icon_src))

    print('[installer] Checking Data (preweb)')
    try:
        for (dst, src) in DATATUP_LIST:
            assert ut.checkpath(src, verbose=True), 'checkpath for src=%r failed' % (src,)
    except Exception as ex:
        ut.printex(ex, 'Checking data failed DATATUP_LIST=' + ut.list_str(DATATUP_LIST))
        raise

    # Web Assets
    INSTALL_WEB = True and not ut.get_argflag('--noweb')
    if INSTALL_WEB:
        web_root = join('ibeis', 'web/')
        #walk_path = join(web_root, 'static')
        #static_data = []
        #for root, dirs, files in os.walk(walk_path):
        #    root2 = root.replace(web_root, '')
        #    for icon_fname in files:
        #        if '.DS_Store' not in icon_fname:
        #            toc_src = join(abspath(root), icon_fname)
        #            toc_dst = join(root2, icon_fname)
        #            static_data.append((toc_dst, toc_src))
        #ut.get_list_column(static_data, 1) == ut.glob(walk_path, '*', recursive=True, with_dirs=False, exclude_dirs=['.DS_Store'])
        static_src_list = ut.glob(join(web_root, 'static'), '*', recursive=True, with_dirs=False, exclude_dirs=['.DS_Store'])
        static_dst_list = [relpath(src, join(root_dir, 'ibeis')) for src in static_src_list]
        # NOTE: zip object is consumed immediately by the extend below
        static_data = zip(static_dst_list, static_src_list)
        DATATUP_LIST.extend(static_data)

        #walk_path = join(web_root, 'templates')
        #template_data = []
        #for root, dirs, files in os.walk(walk_path):
        #    root2 = root.replace(web_root, '')
        #    for icon_fname in files:
        #        if '.DS_Store' not in icon_fname:
        #            toc_src = join(abspath(root), icon_fname)
        #            toc_dst = join(root2, icon_fname)
        #            template_data.append((toc_dst, toc_src))
        template_src_list = ut.glob(join(web_root, 'templates'), '*', recursive=True, with_dirs=False, exclude_dirs=['.DS_Store'])
        template_dst_list = [relpath(src, join(root_dir, 'ibeis')) for src in template_src_list]
        template_data = zip(template_dst_list, template_src_list)
        DATATUP_LIST.extend(template_data)

    print('[installer] Checking Data (postweb)')
    try:
        for (dst, src) in DATATUP_LIST:
            assert ut.checkpath(src, verbose=False), 'checkpath for src=%r failed' % (src,)
    except Exception as ex:
        ut.printex(ex, 'Checking data failed DATATUP_LIST=' + ut.list_str(DATATUP_LIST))
        raise

    return DATATUP_LIST, BINARYTUP_LIST, iconfile
Exemple #48
0
 def print_dir_tree(dataset):
     """Print every path under ``dataset.dataset_dpath``, sorted, one per line."""
     all_fpaths = ut.glob(dataset.dataset_dpath, '*', recursive=True)
     print('\n'.join(sorted(all_fpaths)))
Exemple #49
0
        break
    if len(line_list) == 0 or linenum >= len(line_list) - 1:
        print(' ! could not find approprate position')
        linenum = None
    return linenum


if __name__ == '__main__':
    """
    python -m utool.util_scripts.pyproj_checker
    python ~/code/utool/utool/util_scripts/pyproj_checker.py
    """
    import utool as ut
    import re
    exclude_dirs = ['_broken', '_doc', 'build']
    fpath_list = ut.glob('.', '*.py', exclude_dirs=exclude_dirs, recursive=True)

    encoding_line = '# -*- coding: utf-8 -*-'

    pattern_items = [
        encoding_line,
        #'from __future__ import absolute_import, division, print_function',
        #'from __future__ import absolute_import, division, print_function, unicode_literals',
    ]

    show_diff = ut.get_argflag('--diff')
    do_write = ut.get_argflag('--write')

    need_encoding_fpaths = []

    for pat in pattern_items:
Exemple #50
0
# -*- coding: utf-8 -*-
import utool

#def ibeis_wc():


if __name__ == '__main__':
    # Report total word and line counts over all python files in the tree.
    root = '.'
    py_fpaths = utool.glob(root, '*.py', recursive=True)

    def get_file_stats(fpath):
        # (line_count, word_count) for a single file
        content = utool.read_from(fpath, verbose=False)
        return len(content.splitlines()), len(content.split(' '))

    stats = [get_file_stats(fpath) for fpath in py_fpaths]
    lc = sum(line_count for line_count, _ in stats)
    wc = sum(word_count for _, word_count in stats)

    print('word count = %r' % wc)
    print('line count = %r' % lc)
Exemple #51
0
    if len(line_list) == 0 or linenum >= len(line_list) - 1:
        print(' ! could not find approprate position')
        linenum = None
    return linenum


if __name__ == '__main__':
    """
    python -m utool.util_scripts.pyproj_checker
    python ~/code/utool/utool/util_scripts/pyproj_checker.py
    """
    import utool as ut
    import re
    # Directories that should not be scanned for python files
    exclude_dirs = ['_broken', '_doc', 'build']
    fpath_list = ut.glob('.',
                         '*.py',
                         exclude_dirs=exclude_dirs,
                         recursive=True)

    # The encoding header each python file is expected to contain
    encoding_line = '# -*- coding: utf-8 -*-'

    pattern_items = [
        encoding_line,
        #'from __future__ import absolute_import, division, print_function',
        #'from __future__ import absolute_import, division, print_function, unicode_literals',
    ]

    show_diff = ut.get_argflag('--diff')
    do_write = ut.get_argflag('--write')

    # Files missing the encoding line (populated later;
    # NOTE(review): the remainder of this script is truncated in this chunk)
    need_encoding_fpaths = []
Exemple #52
0
def make_run_tests_script_text(test_headers, test_argvs, quick_tests=None,
                               repodir=None, exclude_list=[]):
    """
    Autogeneration function

    TODO move to util_autogen or just depricate

    Examples:
        >>> from utool.util_tests import *  # NOQA
        >>> import utool  # NOQA
        >>> testdirs = ['~/code/ibeis/test_ibs*.py']
    """
    import utool as ut
    from os.path import relpath, join, dirname  # NOQA

    exclude_list += ['__init__.py']

    # General format of the testing script

    script_fmtstr = ut.codeblock(
        r'''
        #!/bin/bash
        # Runs all tests
        # Win32 path hacks
        export CWD=$(pwd)
        export PYMAJOR="$(python -c "import sys; print(sys.version_info[0])")"

        # <CORRECT_PYTHON>
        # GET CORRECT PYTHON ON ALL PLATFORMS
        export SYSNAME="$(expr substr $(uname -s) 1 10)"
        if [ "$SYSNAME" = "MINGW32_NT" ]; then
            export PYEXE=python
        else
            if [ "$PYMAJOR" = "3" ]; then
                # virtual env?
                export PYEXE=python
            else
                export PYEXE=python2.7
            fi
        fi
        # </CORRECT_PYTHON>

        PRINT_DELIMETER()
        {{
            printf "\n#\n#\n#>>>>>>>>>>> next_test\n\n"
        }}

        export TEST_ARGV="{test_argvs} $@"

        {dirdef_block}

        # Default tests to run
        set_test_flags()
        {{
            export DEFAULT=$1
        {testdefault_block}
        }}
        set_test_flags OFF
        {testdefaulton_block}

        # Parse for bash commandline args
        for i in "$@"
        do
        case $i in --testall)
            set_test_flags ON
            ;;
        esac
        {testcmdline_block}
        done

        BEGIN_TESTS()
        {{
        cat <<EOF
        {runtests_bubbletext}
        EOF
            echo "BEGIN: TEST_ARGV=$TEST_ARGV"
            PRINT_DELIMETER
            num_passed=0
            num_ran=0
            export FAILED_TESTS=''
        }}

        RUN_TEST()
        {{
            echo "RUN_TEST: $@"
            export TEST="$PYEXE $@ $TEST_ARGV"
            $TEST
            export RETURN_CODE=$?
            echo "RETURN_CODE=$RETURN_CODE"
            PRINT_DELIMETER
            num_ran=$(($num_ran + 1))
            if [ "$RETURN_CODE" == "0" ] ; then
                num_passed=$(($num_passed + 1))
            fi
            if [ "$RETURN_CODE" != "0" ] ; then
                export FAILED_TESTS="$FAILED_TESTS\n$TEST"
            fi
        }}

        END_TESTS()
        {{
            echo "RUN_TESTS: DONE"
            if [ "$FAILED_TESTS" != "" ] ; then
                echo "-----"
                printf "Failed Tests:"
                printf "$FAILED_TESTS\n"
                printf "$FAILED_TESTS\n" >> failed_shelltests.txt
                echo "-----"
            fi
            echo "$num_passed / $num_ran tests passed"
        }}

        #---------------------------------------------
        # START TESTS
        BEGIN_TESTS

        {quicktest_block}

        {test_block}

        #---------------------------------------------
        # END TESTING
        END_TESTS
        ''')

    testcmdline_fmtstr = ut.codeblock(
        r'''
        case $i in --notest{header_lower})
            export {testflag}=OFF
            ;;
        esac
        case $i in --test{header_lower})
            export {testflag}=ON
            ;;
        esac
        ''')

    header_test_block_fmstr = ut.codeblock(
        r'''

        #---------------------------------------------
        #{header_text}
        if [ "${testflag}" = "ON" ] ; then
        cat <<EOF
        {header_bubble_text}
        EOF
        {testlines_block}
        fi
        ''')

    #specialargv = '--noshow'
    specialargv = ''
    testline_fmtstr = 'RUN_TEST ${dirvar}/{fpath} {specialargv}'
    testline_fmtstr2 = 'RUN_TEST {fpath} {specialargv}'

    def format_testline(fpath, dirvar):
        if dirvar is None:
            return testline_fmtstr2.format(fpath=fpath, specialargv=specialargv)
        else:
            return testline_fmtstr.format(dirvar=dirvar, fpath=fpath, specialargv=specialargv)

    default_flag_line_list = []
    defaulton_flag_line_list = []
    testcmdline_list = []
    dirdef_list = []
    header_test_block_list = []

    known_tests = ut.ddict(list)

    # Tests to always run
    if quick_tests is not None:
        quicktest_block = '\n'.join(
            ['# Quick Tests (always run)'] +
            ['RUN_TEST ' + testline for testline in quick_tests])
    else:
        quicktest_block = '# No quick tests'

    # Loop over different test types
    for testdef_tup in test_headers:
        header, default, modname, dpath, pats, testcmds = testdef_tup
        # Build individual test type information
        header_upper =  header.upper()
        header_lower = header.lower()
        testflag = header_upper + '_TEST'

        if modname is not None:
            dirvar = header_upper + '_DIR'
            dirdef = ''.join([
                'export {dirvar}=$($PYEXE -c "',
                'import os, {modname};',
                'print(str(os.path.dirname(os.path.dirname({modname}.__file__))))',
                '")']).format(dirvar=dirvar, modname=modname)
            dirdef_list.append(dirdef)
        else:
            dirvar = None

        # Build test dir
        #dirvar = header_upper + '_DIR'
        #dirdef = 'export {dirvar}={dirname}'.format(dirvar=dirvar, dirname=dirname)
        #dirdef_list.append(dirdef)

        # Build command line flags
        default_flag_line = 'export {testflag}=$DEFAULT'.format(testflag=testflag)

        if default:
            defaulton_flag_line = 'export {testflag}=ON'.format(testflag=testflag)
            defaulton_flag_line_list.append(defaulton_flag_line)

        testcmdline_fmtdict = dict(header_lower=header_lower,
                                        testflag=testflag,)
        testcmdline = testcmdline_fmtstr.format(**testcmdline_fmtdict)

        #ut.ls(dpath)

        # VERY HACK BIT OF CODE

        # Get list of tests from patterns
        if testcmds is None:
            if modname is not None:
                module = __import__(modname)
                repo_path = dirname(dirname(module.__file__))
            else:
                repo_path = repodir
            dpath_ = ut.unixpath(util_path.unixjoin(repo_path, dpath))

            if header_upper == 'OTHER':
                # Hacky way to grab any other tests not explicitly seen in this directory
                _testfpath_list = list(set(ut.glob(dpath_, '*.py')) - set(known_tests[dpath_]))
                #_testfpath_list = ut.glob(dpath_, '*.py')
                #set(known_tests[dpath_])
            else:
                _testfpath_list = ut.flatten([ut.glob(dpath_, pat) for pat in pats])

            def not_excluded(x):
                return not any([x.find(exclude) > -1 for exclude in exclude_list])

            _testfpath_list = list(filter(not_excluded, _testfpath_list))

            known_tests[dpath_].extend(_testfpath_list)
            #print(_testfpath_list)
            testfpath_list = [util_path.unixjoin(dpath, relpath(fpath, dpath_))
                              for fpath in _testfpath_list]

            testline_list = [format_testline(fpath, dirvar) for fpath in testfpath_list]
        else:
            testline_list = testcmds

        testlines_block = ut.indentjoin(testline_list).strip('\n')

        # Construct test block for this type
        header_text = header_upper + ' TESTS'
        headerfont = 'cybermedium'
        header_bubble_text =  ut.indent(ut.bubbletext(header_text, headerfont).strip())
        header_test_block_dict = dict(
            testflag=testflag,
            header_text=header_text,
            testlines_block=testlines_block,
            header_bubble_text=header_bubble_text,)
        header_test_block = header_test_block_fmstr.format(**header_test_block_dict)

        # Append to script lists
        header_test_block_list.append(header_test_block)
        default_flag_line_list.append(default_flag_line)
        testcmdline_list.append(testcmdline)

    runtests_bubbletext = ut.bubbletext('RUN TESTS', 'cyberlarge')

    test_block = '\n'.join(header_test_block_list)
    dirdef_block = '\n'.join(dirdef_list)
    testdefault_block = ut.indent('\n'.join(default_flag_line_list))
    testdefaulton_block = '\n'.join(defaulton_flag_line_list)
    testcmdline_block = '\n'.join(testcmdline_list)

    script_fmtdict = dict(
        quicktest_block=quicktest_block,
        runtests_bubbletext=runtests_bubbletext,
        test_argvs=test_argvs, dirdef_block=dirdef_block,
        testdefault_block=testdefault_block,
        testdefaulton_block=testdefaulton_block,
        testcmdline_block=testcmdline_block,
        test_block=test_block,)
    script_text = script_fmtstr.format(**script_fmtdict)

    return script_text
Exemple #53
0
def turtles():
    """
    One-off maintenance script for the raw RotanTurtles hotspotter data.

    Walks every source directory under ``/raid/raw/RotanTurtles/``, removes
    cached binary artifacts, deletes empty directories, halts on the first
    pair of sources whose relative file paths overlap (for interactive
    inspection), and finally merges all remaining sources into the first one.

    Side effects: deletes files and prints a merge bash script; intended to
    be run interactively, not as a library function.
    """
    # Collect the immediate sub-directories of the raw data root.
    # BUG FIX: the keyword was misspelled ``recusrive``; ``recursive=False``
    # is the intended argument name for ut.glob.
    source_dpaths = sorted(
        ut.glob('/raid/raw/RotanTurtles/',
                '*',
                recursive=False,
                with_dirs=True,
                with_files=False))
    sources = [SourceDir(dpath) for dpath in source_dpaths]

    for self in ut.ProgIter(sources, label='populate'):
        self.populate()

    import fnmatch
    # Remove cached binary artifacts and precomputed chip images so they do
    # not participate in the merge.
    del_ext = set(['.npy', '.flann', '.npz'])
    for self in ut.ProgIter(sources, label='populate'):
        flags = [ext in del_ext for ext in self.attrs['ext']]
        to_delete = ut.compress(list(self.fpaths()), flags)
        ut.remove_file_list(to_delete)
        flags = [
            fnmatch.fnmatch(fpath, '*/_hsdb/computed/chips/*.png')
            for fpath in self.rel_fpath_list
        ]
        to_delete = ut.compress(list(self.fpaths()), flags)
        ut.remove_file_list(to_delete)
        self.populate()

    for self in ut.ProgIter(sources, label='del empty'):
        self.populate()
        self.delete_empty_directories()

    # Report the total size of everything that survived the cleanup.
    print(ut.byte_str2(sum([self.nbytes() for self in sources])))

    # Pairwise conflict check: deliberately halt on the first pair of
    # sources that share relative paths so the conflict can be inspected.
    for i, j in ut.combinations(range(len(sources)), 2):
        s1 = sources[i]
        s2 = sources[j]
        isect = set(s1.rel_fpath_list).intersection(s2.rel_fpath_list)
        if isect:
            s1.isect_info(s2)
            print((i, j))
            print(s1.dpath)
            print(s2.dpath)
            self = s1
            other = s2
            assert False, 'sources share relative paths; inspect s1/s2'

    for self in ut.ProgIter(sources, label='populate'):
        self.populate()

    dest = sources[0]
    others = sources[1:]
    # Merge others into dest
    bash_script = '\n'.join([o.make_merge_bash_script(dest) for o in others])
    print(bash_script)

    # BUG FIX: removed dead ``other = self`` assignment — it referenced the
    # leaked loop variable and was immediately rebound by the loop below.
    for other in others:
        other.merge_into(dest)
Exemple #54
0
def train(data_path, output_path, batch_size=32):
    """
    Fine-tune a DenseNet-201 (4-way classifier head) on PNG images found
    under ``<data_path>/train`` and ``<data_path>/val``.

    Args:
        data_path (str): root directory containing 'train' and 'val' subdirs
        output_path (str): directory where the trained weights are written
        batch_size (int): dataloader batch size (default: 32)

    Returns:
        str: path of the saved weights file
    """
    # Pick the first CUDA device when one is available, else run on CPU.
    device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
    using_gpu = str(device) != 'cpu'

    phases = ['train', 'val']

    logger.info('Initializing Datasets and Dataloaders...')

    # Build the dataset and dataloader for each phase from the PNGs on disk.
    filepaths = {}
    datasets = {}
    dataloaders = {}
    for phase in phases:
        filepaths[phase] = ut.glob(os.path.join(data_path, phase, '*.png'))
        datasets[phase] = ImageFilePathList(
            filepaths[phase], transform=TRANSFORMS[phase])
        dataloaders[phase] = torch.utils.data.DataLoader(
            datasets[phase],
            batch_size=batch_size,
            num_workers=batch_size // 8,
            pin_memory=using_gpu,
        )

    logger.info('Initializing Model...')

    # Start from ImageNet weights and replace the classifier head with a
    # dropout + 4-way linear layer.
    model = torchvision.models.densenet201(pretrained=True)
    num_ftrs = model.classifier.in_features
    model.classifier = nn.Sequential(nn.Dropout(0.5), nn.Linear(num_ftrs, 4))
    model = model.to(device)

    logger.info('Print Examples of Training Augmentation...')

    for phase in phases:
        visualize_augmentations(datasets[phase], AGUEMTNATION[phase], phase)

    logger.info('Initializing Optimizer...')

    # Optimize every parameter that requires gradients.
    params_to_update = [
        param for name, param in model.named_parameters() if param.requires_grad
    ]

    optimizer = optim.SGD(params_to_update, lr=0.0005, momentum=0.9)

    # Halve the LR whenever the validation loss plateaus.
    scheduler = optim.lr_scheduler.ReduceLROnPlateau(optimizer,
                                                     'min',
                                                     factor=0.5,
                                                     patience=16,
                                                     min_lr=1e-6)

    logger.info('Start Training...')

    model = finetune(model, dataloaders, optimizer, scheduler, device)

    # Persist a deep copy of the trained state dict.
    ut.ensuredir(output_path)
    weights_path = os.path.join(output_path, 'localizer.canonical.weights')
    weights = {
        'state': copy.deepcopy(model.state_dict()),
    }
    torch.save(weights, weights_path)

    return weights_path
Exemple #55
0
    def fix_empty_dirs(drive):
        """
        # --- FIND EMPTY DIRECTORIES ---
        """
        print('Fixing Empty Dirs in %r' % (drive,))
        fidxs_list = ut.dict_take(drive.dpath_to_fidx, drive.dpath_list)
        isempty_flags = [len(fidxs) == 0 for fidxs in fidxs_list]
        empty_dpaths = ut.compress(drive.dpath_list, isempty_flags)

        def is_cplat_link(path_):
            # Return True when ``path_`` is a symlink or otherwise not
            # listable (e.g. a Windows junction or a permission-protected
            # dir) — such paths must not be treated as empty directories.
            from os.path import islink
            try:
                if islink(path_):
                    return True
                # BUG FIX: the original called os.listdir(d) where ``d`` is
                # undefined inside this function (NameError), and caught the
                # nonexistent name ``SystemErrors`` (a second NameError when
                # any exception fired).  Probe the argument and catch
                # OSError, which covers unlistable/junction paths.
                os.listdir(path_)
                return False
            except OSError:
                return True
        valid_flags = [not is_cplat_link(d) for d  in empty_dpaths]
        if not all(valid_flags):
            print('Filtered windows links %r / %r' % (
                len(empty_dpaths) - sum(valid_flags), len(empty_dpaths)))
            #print(ut.list_str(empty_dpaths[0:10]))
            empty_dpaths = ut.compress(empty_dpaths, valid_flags)

        print('Found %r / %r empty_dpaths' % (len(empty_dpaths), len(drive.dpath_list)))
        print(ut.list_str(empty_dpaths[0:10]))

        # Ensure actually still empty
        current_contents = [ut.glob(d, with_dirs=False)
                            for d in ut.ProgIter(empty_dpaths, 'checking empty status')]
        current_lens = list(map(len, current_contents))
        assert not any(current_lens), 'some dirs are not empty'

        # n ** 2 check to get only the base directories
        isbase_dir = [
            not any([d.startswith(dpath_) and d != dpath_
                        for dpath_ in empty_dpaths])
            for d in ut.ProgIter(empty_dpaths, 'finding base dirs')
        ]
        base_empty_dirs = ut.compress(empty_dpaths, isbase_dir)
        def list_only_files(dpath):
            # Recursively yield the bare file names (not joined with their
            # directory) under ``dpath``; os.walk is used because glob is
            # too slow here.
            for _root, _dirs, filenames in os.walk(dpath):
                yield from filenames
        base_current_contents = [
            list(list_only_files(d))
            for d in ut.ProgIter(base_empty_dirs, 'checking emptyness', freq=10)]
        is_actually_empty = [len(fs) == 0 for fs in base_current_contents]
        not_really_empty = ut.compress(base_empty_dirs, ut.not_list(is_actually_empty))
        print('%d dirs are not actually empty' % (len(not_really_empty),))
        print('not_really_empty = %s' % (ut.list_str(not_really_empty[0:10]),))
        truly_empty_dirs = ut.compress(base_empty_dirs, is_actually_empty)

        def list_all(dpath):
            # Recursively yield the bare names of every directory and file
            # under ``dpath`` (dirs first per walk level, then files);
            # os.walk is used because glob is too slow here.
            for _root, dirnames, filenames in os.walk(dpath):
                yield from dirnames
                yield from filenames

        exclude_base_dirs = [join(drive.root_dpath, 'AppData')]
        exclude_end_dirs = ['__pycache__']
        truly_empty_dirs1 = truly_empty_dirs
        for ed in exclude_base_dirs:
            truly_empty_dirs1 = [
                d for d in truly_empty_dirs1
                if (
                    not any(d.startswith(ed) for ed in exclude_base_dirs) and
                    not any(d.endswith(ed) for ed in exclude_end_dirs)
                )
            ]
        # Ensure actually still empty (with recursive checks for hidden files)
        print('truly_empty_dirs1[::5] = %s' % (
            ut.list_str(truly_empty_dirs1[0::5], strvals=True),))
        #print('truly_empty_dirs1 = %s' % (ut.list_str(truly_empty_dirs1, strvals=True),))

        if not dryrun:
            # FIX PART
            #from os.path import normpath
            #for d in ut.ProgIter(truly_empty_dirs):
            #    break
            #    if ut.WIN32:
            #        # http://www.sevenforums.com/system-security/53095-file-folder-read-only-attribute-wont-disable.html
            #        ut.cmd('attrib', '-r', '-s', normpath(d), verbose=False)
            #x = ut.remove_fpaths(truly_empty_dirs, strict=False)

            print('Deleting %d truly_empty_dirs1' % (len(truly_empty_dirs1),))

            for d in ut.ProgIter(truly_empty_dirs1, 'DELETE empty dirs', freq=1000):  # NOQA
                ut.delete(d, quiet=True)

            if ut.WIN32 and False:
                # remove file that failed removing
                flags = list(map(exists, truly_empty_dirs1))
                truly_empty_dirs1 = ut.compress(truly_empty_dirs1, flags)
                for d in ut.ProgIter(truly_empty_dirs1, 'rming', freq=1000):
                    ut.cmd('rmdir', d)