Example #1
def download_tomcat():
    """
    Put tomcat into a directory controlled by ibeis

    CommandLine:
        # Reset
        python -c "import utool as ut; ut.delete(ut.unixjoin(ut.get_app_resource_dir('ibeis'), 'tomcat'))"
    """
    from os.path import splitext, dirname, join
    print('Grabbing tomcat')
    # FIXME: need to make a stable link
    if ut.WIN32:
        tomcat_binary_url = 'http://mirrors.advancedhosters.com/apache/tomcat/tomcat-8/v8.0.24/bin/apache-tomcat-8.0.24-windows-x86.zip'
    else:
        tomcat_binary_url = 'http://mirrors.advancedhosters.com/apache/tomcat/tomcat-8/v8.0.24/bin/apache-tomcat-8.0.24.zip'
    zip_fpath = ut.grab_file_url(tomcat_binary_url, appname='ibeis')
    # Download tomcat into the IBEIS resource directory
    tomcat_dpath = join(dirname(zip_fpath), 'tomcat')
    if not ut.checkpath(tomcat_dpath, verbose=True):
        # hack because unzipping is still weird
        ut.unzip_file(zip_fpath)
        tomcat_dpath_tmp = splitext(zip_fpath)[0]
        ut.move(tomcat_dpath_tmp, tomcat_dpath)
    if ut.checkpath(join(tomcat_dpath, 'bin'), verbose=True):
        scriptnames = ['catalina.sh', 'startup.sh', 'shutdown.sh']
        for fname in scriptnames:
            fpath = join(tomcat_dpath, 'bin', fname)
            if not ut.is_file_executable(fpath):
                print('Adding executable bits to script %r' % (fpath,))
                ut.chmod_add_executable(fpath)
    return tomcat_dpath
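The function above shows a common utool idiom: download an archive into a per-app cache directory, then guard the unzip behind a path check so reruns are no-ops. A minimal stdlib-only sketch of the same idea (the URL and cache directory are placeholders, not what ibeis actually uses):

import os
import zipfile
import urllib.request
from os.path import join, splitext, exists, basename

def grab_and_unzip(url, cache_dpath):
    """Download ``url`` into ``cache_dpath`` and unzip it exactly once."""
    os.makedirs(cache_dpath, exist_ok=True)
    zip_fpath = join(cache_dpath, basename(url))
    if not exists(zip_fpath):
        urllib.request.urlretrieve(url, zip_fpath)
    out_dpath = splitext(zip_fpath)[0]
    # same existence guard that ut.checkpath provides above
    if not exists(out_dpath):
        with zipfile.ZipFile(zip_fpath) as zfile:
            zfile.extractall(out_dpath)
    return out_dpath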
Example #2
def reset_testdbs(**kwargs):
    # Step 0) Parse Args
    default_args = {'reset_' + key: False
                    for key in six.iterkeys(TEST_DBNAMES_MAP)}
    default_args['reset_all'] = False
    default_args.update(kwargs)
    argdict = ut.parse_dict_from_argv(default_args)
    if not any(list(six.itervalues(argdict))):
        # Default behavior is to reset the small dbs
        argdict['reset_testdb0'] = True
        argdict['reset_testdb1'] = True
        argdict['reset_testdb_guiall'] = True

    # Step 1) Delete DBs to be Reset
    for key, dbname in six.iteritems(TEST_DBNAMES_MAP):
        if argdict.get('reset_' + key, False) or argdict['reset_all']:
            delete_dbdir(dbname)

    # Step 2) Ensure DBs that don't exist
    ensure_smaller_testingdbs()
    workdir = ibeis.sysres.get_workdir()
    if not ut.checkpath(join(workdir, 'PZ_MTEST'), verbose=True):
        ibeis.ensure_pz_mtest()
    if not ut.checkpath(join(workdir, 'NAUT_test'), verbose=True):
        ibeis.ensure_nauts()
    if not ut.checkpath(join(workdir, 'testdb2'), verbose=True):
        ibeis.init.sysres.ensure_testdb2()

    # Step 3) testdb1 becomes the main database
    workdir = ibeis.sysres.get_workdir()
    TESTDB1 = join(workdir, 'testdb1')
    sysres.set_default_dbdir(TESTDB1)
Example #3
def TEST_DELETE_IMAGE(ibs, back):
    gpath_list = grabdata.get_test_gpaths(ndata=None)[0:4]
    gid_list = ibs.add_images(gpath_list)
    bbox_list = [(0, 0, 100, 100)] * len(gid_list)
    name_list = ['a', 'b', 'a', 'd']
    aid_list = ibs.add_annots(gid_list, bbox_list=bbox_list, name_list=name_list)
    gid = gid_list[0]
    assert gid is not None, "gid is None"
    aid_list = ibs.get_image_aids(gid)
    assert len(aid_list) == 1, "Length of aid_list=%r" % (len(aid_list),)
    aid = aid_list[0]
    assert aid is not None, "aid is None"
    cid = ibs.get_annot_cids(aid, ensure=False)
    fid = ibs.get_annot_fids(aid, ensure=False)
    assert cid is None, "cid=%r should be None" % (cid,)
    assert fid is None, "fid=%r should be None" % (fid,)
    cid = ibs.get_annot_cids(aid, ensure=True)
    fid = ibs.get_annot_fids(aid, ensure=True)
    assert cid is not None, "cid should be computed"
    assert fid is not None, "fid should be computed"
    gthumbpath = ibs.get_image_thumbpath(gid)
    athumbpath = ibs.get_annot_chip_thumbpath(aid)
    ibs.delete_images(gid)
    all_gids = ibs.get_valid_gids()
    all_aids = ibs.get_valid_aids()
    all_cids = ibs.get_valid_cids()
    all_fids = ibs.get_valid_fids()
    assert gid not in all_gids, "gid still exists"
    assert aid not in all_aids, "aid %r still exists" % aid
    assert fid not in all_fids, "fid %r still exists" % fid
    assert cid not in all_cids, "cid %r still exists" % cid
    assert not utool.checkpath(gthumbpath), "Thumbnail still exists"
    assert not utool.checkpath(athumbpath), "ANNOTATION Thumbnail still exists"
    return locals()
Example #4
def ensure_smaller_testingdbs():
    """
    Makes the smaller test databases
    """
    def make_testdb0():
        """ makes testdb0 """
        def get_test_gpaths(ndata=None, names=None, **kwargs):
            # Read ndata from args or command line
            """ DEPRICATE """
            ndata_arg = ut.get_argval('--ndata', type_=int, default=None, help_='use --ndata to specify bigger data')
            if ndata_arg is not None:
                ndata = ndata_arg
            imgdir = get_testdata_dir(**kwargs)
            gpath_list = sorted(list(ut.list_images(imgdir, full=True, recursive=True)))
            # Get only the gpaths of certain names
            if names is not None:
                gpath_list = [gpath for gpath in gpath_list if
                              ut.basename_noext(gpath) in names]
            # Get some number of test images
            if ndata is not None:
                gpath_cycle = cycle(gpath_list)
                if six.PY2:
                    gpath_list  = [gpath_cycle.next() for _ in range(ndata)]
                else:
                    gpath_list  = [next(gpath_cycle) for _ in range(ndata)]
            return gpath_list
        workdir = ibeis.sysres.get_workdir()
        TESTDB0 = join(workdir, 'testdb0')
        main_locals = ibeis.main(dbdir=TESTDB0, gui=False, allow_newdir=True)
        ibs = main_locals['ibs']
        assert ibs is not None, str(main_locals)
        gpath_list = list(map(ut.unixpath, get_test_gpaths()))
        #print('[RESET] gpath_list=%r' % gpath_list)
        gid_list = ibs.add_images(gpath_list)  # NOQA
        valid_gids = ibs.get_valid_gids()
        valid_aids = ibs.get_valid_aids()
        try:
            assert len(valid_aids) == 0, 'there are more than 0 annotations in an empty database!'
        except Exception as ex:
            ut.printex(ex, key_list=['valid_aids'])
            raise
        gid_list = valid_gids[0:1]
        bbox_list = [(0, 0, 100, 100)]
        aid = ibs.add_annots(gid_list, bbox_list=bbox_list)[0]
        #print('[RESET] NEW RID=%r' % aid)
        aids = ibs.get_image_aids(gid_list)[0]
        try:
            assert aid in aids, ('bad annotation adder: aid = %r, aids = %r' % (aid, aids))
        except Exception as ex:
            ut.printex(ex, key_list=['aid', 'aids'])
            raise

    get_testdata_dir(True)
    if not ut.checkpath(join(ibeis.sysres.get_workdir(), 'testdb0'), verbose=True):
        print("\n\nMAKE TESTDB0\n\n")
        make_testdb0()
    if not ut.checkpath(join(ibeis.sysres.get_workdir(), 'testdb1'), verbose=True):
        print("\n\nMAKE TESTDB1\n\n")
        ingest_database.ingest_standard_database('testdb1')
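The py2/py3 branch inside get_test_gpaths exists only to pull ndata items from an infinite cycle; itertools.islice does the same on either version. A small sketch:

from itertools import cycle, islice

def repeat_to_length(items, ndata):
    """Cycle ``items`` until ``ndata`` elements have been produced."""
    return list(islice(cycle(items), ndata))

assert repeat_to_length(['a.jpg', 'b.jpg'], 5) == \
    ['a.jpg', 'b.jpg', 'a.jpg', 'b.jpg', 'a.jpg']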
Example #5
def build_win32_inno_installer():
    inno_dir = r'C:\Program Files (x86)\Inno Setup 5'
    inno_fname = 'ISCC.exe'
    inno_fpath = join(inno_dir, inno_fname)
    cwd = get_setup_dpath()
    iss_script = join(cwd, '_installers', 'win_installer_script.iss')
    assert utool.checkpath(inno_fpath, verbose=True)
    assert utool.checkpath(iss_script, verbose=True)
    utool.cmd([inno_fpath, iss_script])
    import shutil
    installer_src = join(cwd, '_installers', 'Output', 'ibeis-win32-setup.exe')
    installer_dst = join(cwd, 'dist', 'ibeis-win32-setup.exe')
    shutil.move(installer_src, installer_dst)
Example #6
def _get_models(ibs, species, modeldir="default", cfg_override=True, verbose=VERBOSE_RF):
    r"""
    Args:
        ibs (IBEISController):  ibeis controller object
        species (?):
        modeldir (str): (default = 'default')
        cfg_override (bool): (default = True)
        verbose (bool):  verbosity flag(default = False)

    Returns:
        ?: fpath_list

    CommandLine:
        python -m ibeis.algo.detect.randomforest --test-_get_models

    Example:
        >>> # ENABLE_DOCTEST
        >>> from ibeis.algo.detect.randomforest import *  # NOQA
        >>> import ibeis
        >>> ibs = ibeis.opendb(defaultdb='testdb1')
        >>> species = ibeis.const.TEST_SPECIES.ZEB_PLAIN
        >>> modeldir = 'default'
        >>> cfg_override = True
        >>> verbose = False
        >>> fpath_list = _get_models(ibs, species, modeldir, cfg_override, verbose)
        >>> result = ('fpath_list = %s' % (str(fpath_list),))
        >>> print(result)
    """
    # with ut.embed_on_exception_context:
    if cfg_override and len(ibs.cfg.detect_cfg.trees_path) > 0:
        trees_path = ibs.cfg.detect_cfg.trees_path
    else:
        # Ensure all models downloaded and accounted for
        assert species is not None, "[_get_models] Cannot detect without specifying a species"
        grabmodels.ensure_models(modeldir=modeldir, verbose=verbose)
        trees_path = grabmodels.get_species_trees_paths(species, modeldir=modeldir)
    # Load tree paths
    if ut.checkpath(trees_path, verbose=verbose):
        fpath_list = ut.ls(trees_path, "*.txt")
        # direct = Directory(trees_path, include_extensions=['txt'])
        # files = direct.files()
    else:
        # If the models do not exist, return None
        fpath_list = None
    if fpath_list is None or len(fpath_list) == 0:
        msg = (
            ut.codeblock(
                """
            [_get_models] Error loading trees, either directory or fpath_list not found
              * trees_path = %r
              * fpath_list = %r
              * species = %r
              * model_dir = %r
              * cfg_override = %r
            """
            )
            % (trees_path, fpath_list, species, modeldir, cfg_override)
        )
        raise AssertionError(msg)
    return fpath_list
Example #7
def ensure_text(fname, text, repo_dpath='.', force=None, locals_={}, chmod=None):
    """
    Args:
        fname (str):  file name
        text (str):
        repo_dpath (str):  directory path string(default = '.')
        force (bool): (default = False)
        locals_ (dict): (default = {})

    Example:
        >>> # DISABLE_DOCTEST
        >>> from utool.util_git import *  # NOQA
        >>> import utool as ut
        >>> result = setup_repo()
        >>> print(result)
    """
    import utool as ut
    ut.colorprint('Ensuring fname=%r' % (fname), 'yellow')

    if force is None and ut.get_argflag('--force-%s' % (fname,)):
        force = True

    fpath = join(repo_dpath, fname)
    if force or not ut.checkpath(fpath, verbose=2, n=5):
        text_ = ut.remove_codeblock_syntax_sentinals(text)
        fmtkw = locals_.copy()
        fmtkw['fname'] = fname
        text_ = text_.format(**fmtkw) + '\n'
        ut.writeto(fpath, text_)
        try:
            if chmod:
                ut.chmod(fpath, chmod)
        except Exception as ex:
            ut.printex(ex, iswarning=True)
Example #8
def ensure_inno_isinstalled():
    """ Ensures that the current machine has INNO installed. returns path to the
    executable """
    assert ut.WIN32, 'Can only build INNO on windows'
    inno_fpath = ut.search_in_dirs(r'Inno Setup 5\ISCC.exe', ut.get_install_dirs())
    # Make sure INNO is installed
    if inno_fpath is None:
        print('WARNING: cannot find inno_fpath')
        AUTO_FIXIT = ut.WIN32
        print('Inno seems to not be installed. AUTO_FIXIT=%r' % AUTO_FIXIT)
        if AUTO_FIXIT:
            print('Automatically trying to download and install INNO')
            # Download INNO Installer
            inno_installer_url = 'http://www.jrsoftware.org/download.php/ispack.exe'
            inno_installer_fpath = ut.download_url(inno_installer_url)
            print('Automatically trying to install INNO')
            # Install INNO Installer
            ut.cmd(inno_installer_fpath)
        else:
            inno_homepage_url = 'http://www.jrsoftware.org/isdl.php'
            ut.open_url_in_browser(inno_homepage_url)
            raise AssertionError('Cannot find INNO and AUTO_FIXIT is False')
        # Ensure that it has now been installed
        inno_fpath = ut.search_in_dirs(r'Inno Setup 5\ISCC.exe', ut.get_install_dirs())
        assert ut.checkpath(inno_fpath, verbose=True, info=True), 'inno installer is still not installed!'
    return inno_fpath
Example #9
def TEST_DELETE_ANNOTATION(ibs, back):
    gpath_list = grabdata.get_test_gpaths(ndata=None)[0:4]
    gid_list = ibs.add_images(gpath_list)
    bbox_list = [(0, 0, 100, 100)] * len(gid_list)
    name_list = ['a', 'b', 'a', 'd']
    aid_list = ibs.add_annots(gid_list, bbox_list=bbox_list, name_list=name_list)
    aid = aid_list[0]
    assert aid is not None, "aid is None"
    cid = ibs.get_annot_cids(aid, ensure=False)
    fid = ibs.get_annot_fids(aid, ensure=False)
    assert cid is None, "cid should be None"
    assert fid is None, "fid should be None"
    cid = ibs.get_annot_cids(aid, ensure=True)
    fid = ibs.get_annot_fids(aid, ensure=True)
    assert cid is not None, "cid should be computed"
    assert fid is not None, "fid should be computed"
    thumbpath = ibs.get_annot_chip_thumbpath(aid)
    ibs.delete_annots(aid)
    aid_list = ibs.get_valid_aids()
    cid_list = ibs.get_valid_cids()
    fid_list = ibs.get_valid_fids()
    assert aid not in aid_list, "RID still exists"
    assert cid not in cid_list, "CID still exists"
    assert fid not in fid_list, "FID still exists"
    assert not utool.checkpath(thumbpath), "Thumbnail still exists"
    return locals()
Example #10
def template(template_directory=None, template_filename=None, **kwargs):
    global_args = {
        'NAVBAR': NavbarClass(),
        'YEAR':   date.today().year,
        'URL':    flask.request.url,
        'REFER_SRC_STR':  flask.request.url.replace(flask.request.url_root, ''),
        '__wrapper__' : True,
    }
    global_args['REFER_SRC_ENCODED'] = encode_refer_url(global_args['REFER_SRC_STR'])
    if 'refer' in flask.request.args.keys():
        refer = flask.request.args['refer']
        print('[web] REFER: %r' % (refer, ))
        global_args['REFER_DST_ENCODED'] = refer
        global_args['REFER_DST_STR'] = decode_refer_url(refer)
    if template_directory is None:
        template_directory = ''
        #template_directory = abspath(join(dirname(__file__), 'templates'))
        #template_directory = join(dirname(dirname(__file__)))
    if template_filename is None:
        template_filename = 'index'
    template_ = join(template_directory, template_filename + '.html')
    # Update global args with the template's args
    _global_args = dict(global_args)
    _global_args.update(kwargs)
    print('[appfuncs] template()')
    from ibeis.control import controller_inject
    app = controller_inject.get_flask_app()
    # flask hates windows apparently
    template_ = template_.replace('\\', '/')
    print('[appfuncs.template] * app.template_folder = %r' % (app.template_folder,))
    print('[appfuncs.template] * template_directory = %r' % (template_directory,))
    print('[appfuncs.template] * template_filename = %r' % (template_filename,))
    print('[appfuncs.template] * template_ = %r' % (template_,))
    try:
        ret = flask.render_template(template_, **_global_args)
        #ret = flask.render_template(full_template_fpath, **_global_args)
    except jinja2.exceptions.TemplateNotFound as ex:
        print('Error template not found')
        full_template_fpath = join(app.template_folder, template_)
        print('[appfuncs.template] * full_template_fpath = %r' % (full_template_fpath,))
        ut.checkpath(full_template_fpath, verbose=True)
        ut.printex(ex, 'Template error in appfuncs', tb=True)
        raise
    except Exception as ex:
        ut.printex(ex, 'Error in appfuncs', tb=True)
        raise
    return ret
Example #11
def TEST_DELETE_ANNOTATION_CHIPS(ibs, back):
    gpath_list = grabdata.get_test_gpaths(ndata=None)[0:4]
    gid_list = ibs.add_images(gpath_list)
    bbox_list = [(0, 0, 100, 100)] * len(gid_list)
    name_list = ['a', 'b', 'a', 'd']
    aid_list = ibs.add_annots(gid_list, bbox_list=bbox_list, name_list=name_list)
    assert len(aid_list) != 0, "No annotations"
    aid = aid_list[0]
    gid = ibs.get_annot_gids(aid)
    assert gid is not None, "gid for aid=%r is None" % (aid,)
    gthumbpath = ibs.get_image_thumbpath(gid)
    annotation_thumbpath = ibs.get_annot_chip_thumbpath(aid)
    ibs.delete_annot_chips(aid)
    aid_list = ibs.get_valid_aids()
    assert aid in aid_list, "Error: Annotation deleted"
    assert not utool.checkpath(gthumbpath), "Image Thumbnail not deleted"
    assert not utool.checkpath(annotation_thumbpath), "Roi Thumbnail not deleted"
    return locals()
Example #12
def ensure_inno_script():
    """ writes inno script to disk for win32 installer build """
    cwd = get_setup_dpath()
    iss_script_fpath = join(cwd, '_installers', 'win_installer_script.iss')
    # THE ISS USES {} AS SYNTAX. CAREFUL
    #app_publisher = 'Rensselaer Polytechnic Institute'
    #app_name = 'IBEIS'
    import ibeis
    iss_script_code = ut.codeblock(
        r'''
        ; Script generated by the Inno Setup Script Wizard.
        ; SEE THE DOCUMENTATION FOR DETAILS ON CREATING INNO SETUP SCRIPT FILES!
        ; http://www.jrsoftware.org/isdl.php

        [Setup]
        ; NOTE: The value of AppId uniquely identifies this application.
        ; Do not use the same AppId value in installers for other applications.
        ; (To generate a new GUID, click Tools | Generate GUID inside the IDE.)
        ; Also it seems like the off-balanced curly brace is necessary
        AppId={{47BE3DA2-261D-4672-9849-18BB2EB382FC}
        AppName=IBEIS
        AppVersion=''' + str(ibeis.__version__) + '''
        ;AppVerName=IBEIS 1
        AppPublisher=Rensselaer Polytechnic Institute
        AppPublisherURL=ibeis.org ;www.rpi.edu/~crallj/
        AppSupportURL=ibeis.org ;ww.rpi.edu/~crallj/
        AppUpdatesURL=ibeis.org ;www.rpi.edu/~crallj/
        DefaultDirName={pf}\IBEIS
        DefaultGroupName=IBEIS
        OutputBaseFilename=ibeis-win32-setup
        SetupIconFile=ibsicon.ico
        Compression=lzma
        SolidCompression=yes

        [Languages]
        Name: "english"; MessagesFile: "compiler:Default.isl"

        [Tasks]
        Name: "desktopicon"; Description: "{cm:CreateDesktopIcon}"; GroupDescription: "{cm:AdditionalIcons}"; Flags: unchecked

        [Files]
        Source: "..\dist\ibeis\IBEISApp.exe"; DestDir: "{app}"; Flags: ignoreversion
        Source: "..\dist\ibeis\*"; DestDir: "{app}"; Flags: ignoreversion recursesubdirs createallsubdirs
        ; NOTE: Don't use "Flags: ignoreversion" on any shared system files

        [Icons]
        Name: "{group}\ibeis"; Filename: "{app}\IBEISApp.exe"
        Name: "{commondesktop}\ibeis"; Filename: "{app}\IBEISApp.exe"; Tasks: desktopicon

        [Run]
        Filename: "{app}\IBEISApp.exe"; Description: "{cm:LaunchProgram,IBEIS}"; Flags: nowait postinstall skipifsilent
        '''
    )
    ut.write_to(iss_script_fpath, iss_script_code, onlyifdiff=True)
    assert ut.checkpath(iss_script_fpath, verbose=True, info=True), 'cannot find iss_script_fpath'
    return iss_script_fpath
Example #13
def test_file_hash():
    resdir = utool.get_app_resource_dir('utool')
    test_fpath = join(resdir, 'lorium_ipsum.txt')
    if not utool.checkpath(test_fpath, verbose=True, n=100):
        utool.write_to(test_fpath, lorium_text)
    hash_ = utool.get_file_hash(test_fpath)
    target_hash_ = b'\xd1Y\xe5\xa2\xc1\xd8\xb8\nS\xb1?\x16\xfe\xc5\x88\xbd\x9e\xb4\xe3\xda'
    print(repr(hash_))
    print(repr(target_hash_))
    assert hash_ == target_hash_
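The 20-byte target digest suggests utool.get_file_hash defaults to SHA-1, though that is an assumption about its implementation. A stdlib sketch of the same file hash:

import hashlib

def file_sha1_digest(fpath, blocksize=65536):
    # Raw 20-byte SHA-1 digest of the file contents (assumed to match
    # what utool.get_file_hash returns by default).
    hasher = hashlib.sha1()
    with open(fpath, 'rb') as file_:
        for chunk in iter(lambda: file_.read(blocksize), b''):
            hasher.update(chunk)
    return hasher.digest()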
Example #14
def ensure_inno_script():
    """ writes inno script to disk for win32 installer build """
    cwd = get_setup_dpath()
    iss_script_fpath = join(cwd, '_installers', 'win_installer_script.iss')
    # THE ISS USES {} AS SYNTAX. CAREFUL
    #app_publisher = 'Rensselaer Polytechnic Institute'
    #app_name = 'IBEIS'
    import ibeis
    iss_script_code = ut.codeblock(r'''
        ; Script generated by the Inno Setup Script Wizard.
        ; SEE THE DOCUMENTATION FOR DETAILS ON CREATING INNO SETUP SCRIPT FILES!
        ; http://www.jrsoftware.org/isdl.php

        [Setup]
        ; NOTE: The value of AppId uniquely identifies this application.
        ; Do not use the same AppId value in installers for other applications.
        ; (To generate a new GUID, click Tools | Generate GUID inside the IDE.)
        ; Also it seems like the off-balanced curly brace is necessary
        AppId={{47BE3DA2-261D-4672-9849-18BB2EB382FC}
        AppName=IBEIS
        AppVersion=''' + str(ibeis.__version__) + '''
        ;AppVerName=IBEIS 1
        AppPublisher=Rensselaer Polytechnic Institute
        AppPublisherURL=ibeis.org ;www.rpi.edu/~crallj/
        AppSupportURL=ibeis.org ;ww.rpi.edu/~crallj/
        AppUpdatesURL=ibeis.org ;www.rpi.edu/~crallj/
        DefaultDirName={pf}\IBEIS
        DefaultGroupName=IBEIS
        OutputBaseFilename=ibeis-win32-setup
        SetupIconFile=ibsicon.ico
        Compression=lzma
        SolidCompression=yes

        [Languages]
        Name: "english"; MessagesFile: "compiler:Default.isl"

        [Tasks]
        Name: "desktopicon"; Description: "{cm:CreateDesktopIcon}"; GroupDescription: "{cm:AdditionalIcons}"; Flags: unchecked

        [Files]
        Source: "..\dist\ibeis\IBEISApp.exe"; DestDir: "{app}"; Flags: ignoreversion
        Source: "..\dist\ibeis\*"; DestDir: "{app}"; Flags: ignoreversion recursesubdirs createallsubdirs
        ; NOTE: Don't use "Flags: ignoreversion" on any shared system files

        [Icons]
        Name: "{group}\ibeis"; Filename: "{app}\IBEISApp.exe"
        Name: "{commondesktop}\ibeis"; Filename: "{app}\IBEISApp.exe"; Tasks: desktopicon

        [Run]
        Filename: "{app}\IBEISApp.exe"; Description: "{cm:LaunchProgram,IBEIS}"; Flags: nowait postinstall skipifsilent
        ''')
    ut.write_to(iss_script_fpath, iss_script_code, onlyifdiff=True)
    assert ut.checkpath(iss_script_fpath, verbose=True,
                        info=True), 'cannot find iss_script_fpath'
    return iss_script_fpath
Example #15
def get_regen_cmd():
    try:
        if len(sys.argv) > 0 and ut.checkpath(sys.argv[0]):
            # Check if running python command
            if ut.is_python_module(sys.argv[0]):
                python_exe = ut.python_executable(check=False)
                modname = ut.get_modname_from_modpath(sys.argv[0])
                new_argv = [python_exe, '-m', modname] + sys.argv[1:]
                return ' '.join(new_argv)
    except Exception as ex:
        ut.printex(ex, iswarning=True)
    return ' '.join(sys.argv)
Example #16
def TEST_DELETE_ANNOTATION_CHIPS(ibs, back):
    gpath_list = grabdata.get_test_gpaths(ndata=None)[0:4]
    gid_list = ibs.add_images(gpath_list)
    bbox_list = [(0, 0, 100, 100)] * len(gid_list)
    name_list = ['a', 'b', 'a', 'd']
    aid_list = ibs.add_annots(gid_list,
                              bbox_list=bbox_list,
                              name_list=name_list)
    assert len(aid_list) != 0, "No annotations"
    aid = aid_list[0]
    gid = ibs.get_annot_gids(aid)
    assert gid is not None, "gid for aid=%r is None" % (aid, )
    gthumbpath = ibs.get_image_thumbpath(gid)
    annotation_thumbpath = ibs.get_annot_chip_thumbpath(aid)
    ibs.delete_annot_chips(aid)
    aid_list = ibs.get_valid_aids()
    assert aid in aid_list, "Error: Annotation deleted"
    assert not utool.checkpath(gthumbpath), "Image Thumbnail not deleted"
    assert not utool.checkpath(
        annotation_thumbpath), "Roi Thumbnail not deleted"
    return locals()
Example #17
def TEST_DELETE_ANNOTATION_ALL(ibs, back):
    aid_list = ibs.get_valid_aids()
    thumbpath_list = ibs.get_annot_chip_thumbpath(aid_list)
    ibs.delete_annots(aid_list)
    aid_list = ibs.get_valid_aids()
    cid_list = ibs.get_valid_cids()
    fid_list = ibs.get_valid_fids()
    assert len(aid_list) == 0, "Didn't delete all ANNOTATIONs"
    assert len(cid_list) == 0, "Didn't delete all chips"
    assert len(fid_list) == 0, "Didn't delete all features"
    for thumbpath in thumbpath_list:
        assert not utool.checkpath(thumbpath), "Thumbnail still exists"
    return locals()
Example #18
def get_regen_cmd():
    # TODO: move to utool
    try:
        if len(sys.argv) > 0 and ut.checkpath(sys.argv[0]):
            # Check if running python command
            if ut.is_python_module(sys.argv[0]):
                python_exe = ut.python_executable(check=False)
                modname = ut.get_modname_from_modpath(sys.argv[0])
                new_argv = [python_exe, '-m', modname] + sys.argv[1:]
                return ' '.join(new_argv)
    except Exception as ex:
        ut.printex(ex, iswarning=True)
    return ' '.join(sys.argv)
Example #19
def TEST_DELETE_ANNOTATION_ALL(ibs, back):
    aid_list = ibs.get_valid_aids()
    thumbpath_list = ibs.get_annot_chip_thumbpath(aid_list)
    ibs.delete_annots(aid_list)
    aid_list = ibs.get_valid_aids()
    cid_list = ibs.get_valid_cids()
    fid_list = ibs.get_valid_fids()
    assert len(aid_list) == 0, "Didn't delete all ANNOTATIONs"
    assert len(cid_list) == 0, "Didn't delete all chips"
    assert len(fid_list) == 0, "Didn't delete all features"
    for thumbpath in thumbpath_list:
        assert not utool.checkpath(thumbpath), "Thumbnail still exists"
    return locals()
Example #20
def reset_testdbs(**kwargs):
    # Step 0) Parse Args
    import ibeis
    ibeis.ENABLE_WILDBOOK_SIGNAL = False
    default_args = {
        'reset_' + key: False
        for key in six.iterkeys(TEST_DBNAMES_MAP)
    }
    default_args['reset_all'] = False
    default_args.update(kwargs)
    argdict = ut.parse_dict_from_argv(default_args)
    if not any(list(six.itervalues(argdict))):
        # Default behavior is to reset the small dbs
        argdict['reset_testdb0'] = True
        argdict['reset_testdb1'] = True
        argdict['reset_testdb_guiall'] = True

    # Step 1) Delete DBs to be Reset
    for key, dbname in six.iteritems(TEST_DBNAMES_MAP):
        if argdict.get('reset_' + key, False) or argdict['reset_all']:
            delete_dbdir(dbname)

    # Step 2) Ensure DBs that don't exist
    ensure_smaller_testingdbs()
    workdir = ibeis.sysres.get_workdir()
    if not ut.checkpath(join(workdir, 'PZ_MTEST'), verbose=True):
        ibeis.ensure_pz_mtest()
    if not ut.checkpath(join(workdir, 'NAUT_test'), verbose=True):
        ibeis.ensure_nauts()
    if not ut.checkpath(join(workdir, 'wd_peter2'), verbose=True):
        ibeis.ensure_wilddogs()
    if not ut.checkpath(join(workdir, 'testdb2'), verbose=True):
        ibeis.init.sysres.ensure_testdb2()

    # Step 3) testdb1 becomes the main database
    workdir = ibeis.sysres.get_workdir()
    TESTDB1 = join(workdir, 'testdb1')
    sysres.set_default_dbdir(TESTDB1)
Example #21
def grab_selenium_chromedriver(redownload=False):
    r"""
    Automatically download selenium chrome driver if needed

    CommandLine:
        python -m utool.util_grabdata --test-grab_selenium_chromedriver:1

    Example:
        >>> # DISABLE_DOCTEST
        >>> ut.grab_selenium_chromedriver()
        >>> import selenium.webdriver
        >>> driver = selenium.webdriver.Chrome()
        >>> driver.get('http://www.google.com')
        >>> search_field = driver.find_element_by_name('q')
        >>> search_field.send_keys('puppies')
        >>> search_field.send_keys(selenium.webdriver.common.keys.Keys.ENTER)

    Example1:
        >>> # DISABLE_DOCTEST
        >>> import selenium.webdriver
        >>> driver = selenium.webdriver.Firefox()
        >>> driver.get('http://www.google.com')
        >>> search_field = driver.find_element_by_name('q')
        >>> search_field.send_keys('puppies')
        >>> search_field.send_keys(selenium.webdriver.common.keys.Keys.ENTER)
    """
    import utool as ut
    import os
    import stat
    # TODO: use a better download dir (but it must be in the PATH or selenium freaks out)
    chromedriver_dpath = ut.ensuredir(ut.truepath('~/bin'))
    chromedriver_fpath = join(chromedriver_dpath, 'chromedriver')
    if not ut.checkpath(chromedriver_fpath) or redownload:
        assert chromedriver_dpath in os.environ['PATH'].split(os.pathsep)
        # TODO: make this work for windows as well
        if ut.LINUX and ut.util_cplat.is64bit_python():
            import requests
            rsp = requests.get('http://chromedriver.storage.googleapis.com/LATEST_RELEASE')
            assert rsp.status_code == 200
            url = 'http://chromedriver.storage.googleapis.com/' + rsp.text.strip() + '/chromedriver_linux64.zip'
            ut.grab_zipped_url(url, download_dir=chromedriver_dpath, redownload=True)
        else:
            raise AssertionError('unsupported chrome driver getter script')
        if not ut.WIN32:
            st = os.stat(chromedriver_fpath)
            os.chmod(chromedriver_fpath, st.st_mode | stat.S_IEXEC)
    ut.assert_exists(chromedriver_fpath)
    os.environ['webdriver.chrome.driver'] = chromedriver_fpath
    return chromedriver_fpath
Example #22
def init_theanorc():
    theanorc_fpath = join(os.getenv('HOME'), '.theanorc')
    theanorc_text = ut.codeblock('''
        [global]
        floatX = float32
        device = gpu0
        openmp = True

        [nvcc]
        fastmath = True
        ''')
    if ut.checkpath(theanorc_fpath, verbose=True):
        if not ut.arg_you_sure('overwrite?'):
            return
    ut.write_to(theanorc_fpath, theanorc_text)
Example #23
def __setstate__(ibs, state):
    # Hack to allow for ibeis objects to be pickled
    import ibeis
    dbdir = state['dbdir']
    machine_name = state.pop('machine_name')
    try:
        assert machine_name == ut.get_computer_name(), (
            'ibeis objects can only be pickled and unpickled on the same machine')
    except AssertionError as ex:
        iswarning = ut.checkpath(dbdir)
        ut.printex(ex, iswarning=iswarning)
        if not iswarning:
            raise
    ibs2 = ibeis.opendb(dbdir=dbdir, web=False)
    ibs.__dict__.update(**ibs2.__dict__)
Example #24
    def load_or_build_flann(dstcnvs_normer, cachedir=None, verbose=True, *args, **kwargs):
        from vtool._pyflann_backend import pyflann as pyflann

        flann_fpath = dstcnvs_normer.get_flann_fpath(cachedir)
        if ut.checkpath(flann_fpath, verbose=ut.VERBOSE):
            try:
                dstcnvs_normer.flann = pyflann.FLANN()
                dstcnvs_normer.flann.load_index(flann_fpath, dstcnvs_normer.vecs)
                assert dstcnvs_normer.flann._FLANN__curindex is not None
                # load_success = True
            except Exception as ex:
                ut.printex(ex, '... cannot load distinctiveness flann', iswarning=True)
                dstcnvs_normer.rebuild(cachedir)
        else:
            dstcnvs_normer.ensure_flann(cachedir)
Example #25
def load_or_build_flann(dstcnvs_normer, cachedir=None, verbose=True, *args,
                        **kwargs):
    import pyflann
    flann_fpath = dstcnvs_normer.get_flann_fpath(cachedir)
    if ut.checkpath(flann_fpath, verbose=ut.VERBOSE):
        try:
            dstcnvs_normer.flann = pyflann.FLANN()
            dstcnvs_normer.flann.load_index(flann_fpath, dstcnvs_normer.vecs)
            assert dstcnvs_normer.flann._FLANN__curindex is not None
            # load_success = True
        except Exception as ex:
            ut.printex(ex, '... cannot load distinctiveness flann',
                       iswarning=True)
            dstcnvs_normer.rebuild(cachedir)
    else:
        dstcnvs_normer.ensure_flann(cachedir)
Example #26
def download_image_urls(image_url_info_list):
    # Find ones that we already have
    print('Requested %d downloaded images' % (len(image_url_info_list)))
    full_gpath_list = [join(image_dir, basename(gpath)) for gpath in image_url_info_list]
    exists_list = [ut.checkpath(gpath) for gpath in full_gpath_list]
    image_url_info_list_ = ut.compress(image_url_info_list, ut.not_list(exists_list))
    print('Already have %d/%d downloaded images' % (
        len(image_url_info_list) - len(image_url_info_list_), len(image_url_info_list)))
    print('Need to download %d images' % (len(image_url_info_list_)))
    #import sys
    #sys.exit(0)
    # Download the rest
    imgurl_prefix = 'https://snapshotserengeti.s3.msi.umn.edu/'
    image_url_list = [imgurl_prefix + suffix for suffix in image_url_info_list_]
    for img_url in ut.ProgressIter(image_url_list, lbl='Downloading image'):
        ut.grab_file_url(img_url, download_dir=image_dir)
    return full_gpath_list
Example #27
def TEST_DELETE_IMAGE_THUMBTUPS(ibs, back):
    gpath_list = grabdata.get_test_gpaths(ndata=None)[0:4]
    gid_list = ibs.add_images(gpath_list)
    bbox_list = [(0, 0, 100, 100)] * len(gid_list)
    name_list = ['a', 'b', 'a', 'd']
    aid_list = ibs.add_annots(gid_list, bbox_list=bbox_list, name_list=name_list)
    assert len(aid_list) != 0, "No annotations added"
    thumbpath_list = ibs.get_image_thumbpath(gid_list)
    gpath_list = ibs.get_image_paths(gid_list)
    ibs.delete_image_thumbtups(gid_list)
    assert utool.is_list(thumbpath_list), "thumbpath_list is not a list"
    assert utool.is_list(gpath_list), "gpath_list is not a list"
    for path in thumbpath_list:
        assert not utool.checkpath(path), "Thumbnail not deleted"
    for path in gpath_list:
        utool.assertpath(path)
    return locals()
Example #28
def dump_autogen_code(fpath, autogen_text, codetype='python', fullprint=None):
    """
    Helper that writes a file if -w is given on the command line, otherwise
    it just prints it out. It has the option of comparing a diff to the file.
    """
    import utool as ut
    dowrite = ut.get_argflag(('-w', '--write'))
    show_diff = ut.get_argflag('--diff')
    num_context_lines = ut.get_argval('--diff', type_=int, default=None)
    show_diff = show_diff or num_context_lines is not None

    if fullprint is None:
        fullprint = True

    if fullprint is False:
        fullprint = ut.get_argflag('--print')

    print('[autogen] Autogenerated %s...\n+---\n' % (fpath,))
    if not dowrite:
        if fullprint:
            ut.print_code(autogen_text, lexer_name=codetype)
            print('\nL___')
        else:
            print('specify --print to write to stdout')
            pass
        print('specify -w to write, or --diff to compare')
        print('...would write to: %s' % fpath)
    if show_diff:
        if ut.checkpath(fpath, verbose=True):
            prev_text = ut.read_from(fpath)
            textdiff = ut.get_textdiff(prev_text, autogen_text,
                                       num_context_lines=num_context_lines)
            try:
                ut.print_difftext(textdiff)
            except UnicodeDecodeError:
                import unicodedata
                textdiff = unicodedata.normalize('NFKD', textdiff).encode('ascii', 'ignore')
                ut.print_difftext(textdiff)

        if dowrite:
            print('WARNING: Not writing. Remove --diff from command line')
    elif dowrite:
        ut.write_to(fpath, autogen_text)
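For reference, ut.get_textdiff can be approximated with difflib from the standard library; a sketch that honors the same num_context_lines knob:

import difflib

def text_unified_diff(prev_text, new_text, num_context_lines=None):
    """Unified diff between two strings, similar to ut.get_textdiff."""
    if num_context_lines is None:
        num_context_lines = 3  # difflib's own default
    lines = difflib.unified_diff(
        prev_text.splitlines(keepends=True),
        new_text.splitlines(keepends=True),
        fromfile='previous', tofile='autogen',
        n=num_context_lines)
    return ''.join(lines)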
Example #29
def convert_pdf_to_image(pdf_fpath, ext='.jpg', verbose=1, dpi=300,
                         quality=90):
    import utool as ut
    if verbose:
        print('[ut] convert_pdf_to_image.')
    img_fpath = ut.ensure_ext(pdf_fpath, ext)
    if ut.UNIX:
        convert_fpath = ut.cmd2('which convert')['out'].strip()
        if not convert_fpath:
            raise Exception('ImageMagick convert was not found')
    args = ' '.join(['convert', '-density', str(dpi), pdf_fpath, '-quality',
                     str(quality), img_fpath])
    info = ut.cmd2(args, verbose=verbose > 1)  # NOQA
    if not ut.checkpath(img_fpath, verbose=verbose > 1):
        print('Failed to convert pdf to ' + ext)
        print(info['out'])
        raise Exception('ImageMagick failed to convert pdf to ' + ext)
    return img_fpath
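Shelling out to `which convert` only works on unix; shutil.which performs the same lookup portably. A sketch of that guard:

import shutil

def find_imagemagick_convert():
    """Locate ImageMagick's convert executable or fail loudly."""
    convert_fpath = shutil.which('convert')
    if convert_fpath is None:
        raise Exception('ImageMagick convert was not found')
    return convert_fpath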
Example #30
def ensure_local_war(verbose=ut.NOT_QUIET):
    """
    Ensures tomcat has been unpacked and the war is localized

    CommandLine:
        ibeis ensure_local_war

    Example:
        >>> # SCRIPT
        >>> from ibeis.control.wildbook_manager import *  # NOQA
        >>> result = ensure_local_war()
        >>> print(result)
    """
    # TODO: allow custom specified tomcat directory
    try:
        output = subprocess.check_output(['java', '-version'],
                                         stderr=subprocess.STDOUT)
        _java_version = output.split('\n')[0]
        _java_version = _java_version.replace('java version ', '')
        java_version = _java_version.replace('"', '')
        print('java_version = %r' % (java_version,))
        if not java_version.startswith('1.7'):
            print('Warning: wildbook is only supported for java 1.7')
    except OSError:
        output = None
    if output is None:
        raise ImportError(
            'Cannot find java on this machine. '
            'Please install java: http://www.java.com/en/download/')

    tomcat_dpath = find_or_download_tomcat()
    assert tomcat_dpath is not None, 'Could not find tomcat'
    redownload = ut.get_argflag('--redownload-war')
    war_fpath = find_or_download_wilbook_warfile(redownload=redownload)
    war_fname = basename(war_fpath)

    # Move the war file to tomcat webapps if not there
    webapps_dpath = join(tomcat_dpath, 'webapps')
    deploy_war_fpath = join(webapps_dpath, war_fname)
    if not ut.checkpath(deploy_war_fpath, verbose=verbose):
        ut.copy(war_fpath, deploy_war_fpath)

    wb_target = splitext(war_fname)[0]
    return tomcat_dpath, webapps_dpath, wb_target
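One caveat: on Python 3 check_output returns bytes, so the split('\n') above would fail; `java -version` also writes to stderr, which is why it is redirected. A hedged Python 3 sketch of the same version probe (get_java_version is a hypothetical helper name):

import subprocess

def get_java_version():
    """Return e.g. '1.7.0_80' from ``java -version``, or None if java is missing."""
    try:
        output = subprocess.check_output(
            ['java', '-version'], stderr=subprocess.STDOUT, text=True)
    except (OSError, subprocess.CalledProcessError):
        return None
    # First line looks like: java version "1.7.0_80"
    first_line = output.splitlines()[0]
    return first_line.replace('java version ', '').replace('"', '')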
Example #31
def ensure_local_war(verbose=ut.NOT_QUIET):
    """
    Ensures tomcat has been unpacked and the war is localized

    CommandLine:
        ibeis ensure_local_war

    Example:
        >>> # SCRIPT
        >>> from ibeis.control.wildbook_manager import *  # NOQA
        >>> result = ensure_local_war()
        >>> print(result)
    """
    # TODO: allow custom specified tomcat directory
    try:
        output = subprocess.check_output(['java', '-version'],
                                         stderr=subprocess.STDOUT)
        _java_version = output.split('\n')[0]
        _java_version = _java_version.replace('java version ', '')
        java_version = _java_version.replace('"', '')
        print('java_version = %r' % (java_version, ))
        if not java_version.startswith('1.7'):
            print('Warning: wildbook is only supported for java 1.7')
    except OSError:
        output = None
    if output is None:
        raise ImportError(
            'Cannot find java on this machine. '
            'Please install java: http://www.java.com/en/download/')

    tomcat_dpath = find_or_download_tomcat()
    assert tomcat_dpath is not None, 'Could not find tomcat'
    redownload = ut.get_argflag('--redownload-war')
    war_fpath = find_or_download_wilbook_warfile(redownload=redownload)
    war_fname = basename(war_fpath)

    # Move the war file to tomcat webapps if not there
    webapps_dpath = join(tomcat_dpath, 'webapps')
    deploy_war_fpath = join(webapps_dpath, war_fname)
    if not ut.checkpath(deploy_war_fpath, verbose=verbose):
        ut.copy(war_fpath, deploy_war_fpath)

    wb_target = splitext(war_fname)[0]
    return tomcat_dpath, webapps_dpath, wb_target
Example #32
    def publish(dstcnvs_normer, cachedir=None):
        """
        Sets this as the default normalizer available for download
        ONLY DEVELOPERS CAN PERFORM THIS OPERATION

        Args:
            cachedir (str):

        CommandLine:
            python -m wbia.algo.hots.distinctiveness_normalizer --test-publish

        Example:
            >>> # DISABLE_DOCTEST
            >>> from wbia.algo.hots.distinctiveness_normalizer import *  # NOQA
            >>> dstcnvs_normer = testdata_distinctiveness()[0]
            >>> dstcnvs_normer.rebuild()
            >>> dstcnvs_normer.save()
            >>> result = dstcnvs_normer.publish(cachedir)
            >>> # verify results
            >>> print(result)
        """
        from os.path import basename, join

        assert ut.is_developer(), 'ONLY DEVELOPERS CAN PERFORM THIS OPERATION'
        cachedir = dstcnvs_normer.cachedir if cachedir is None else cachedir
        archive_fpath = dstcnvs_normer.archive(cachedir, overwrite=True)
        archive_fname = basename(archive_fpath)
        publish_dpath = PUBLISH_DIR
        publish_fpath = join(publish_dpath, archive_fname)
        if ut.checkpath(publish_fpath, verbose=True):
            logger.info('Overwriting model')
            logger.info(
                'old nBytes(publish_fpath) = %s'
                % (ut.get_file_nBytes_str(publish_fpath),)
            )
            logger.info(
                'new nBytes(archive_fpath) = %s'
                % (ut.get_file_nBytes_str(archive_fpath),)
            )
        else:
            logger.info('Publishing model')
        logger.info('publish_fpath = %r' % (publish_fpath,))
        ut.copy(archive_fpath, publish_fpath)
Example #33
def fix_importlib_hook():
    """ IMPORTLIB FIX

    References:
        http://stackoverflow.com/questions/18596410/importerror-no-module-named-mpl-toolkits-with-maptlotlib-1-3-0-and-py2exe
    """
    try:
        dpath_ = importlib.import_module('mpl_toolkits').__path__
        if isinstance(dpath_, (list, tuple)):
            # namespace package: take the first path entry
            dpath = dpath_[0]
        else:
            dpath = dpath_
        fpath = join(dpath, '__init__.py')
        # check (and touch) the __init__.py itself, not just its directory
        if not ut.checkpath(fpath, verbose=True, info=True):
            ut.touch(fpath)

    except ImportError as ex:
        ut.printex(ex, 'pip install mpl_toolkits?')
Example #35
def get_local_dist_packages_dir():
    """
    Attempts to work around virtualenvs and find the system dist_packages.
    Essentially this is implemented as a lookup table.
    """
    import utool as ut
    if not ut.in_virtual_env():
        # Non venv case
        return get_site_packages_dir()
    else:
        candidates = []
        if ut.LINUX:
            candidates += [
                '/usr/local/lib/python2.7/dist-packages',
            ]
        else:
            raise NotImplementedError()
        for path in candidates:
            if ut.checkpath(path):
                return path
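The candidate walk at the end is a generic first-existing-path lookup; a standalone sketch with os.path standing in for ut.checkpath:

import os

def first_existing_path(candidates):
    """Return the first candidate path that exists, else None."""
    for path in candidates:
        if os.path.exists(path):  # stand-in for ut.checkpath(path)
            return path
    return None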
Example #36
def load(nnindexer, cachedir=None, fpath=None, verbose=True):
    r"""
    Loads a cached flann neighbor indexer from disk (not the data)
    """
    load_success = False
    if fpath is None:
        flann_fpath = nnindexer.get_fpath(cachedir)
    else:
        flann_fpath = fpath
    nnindexer.flann_fpath = flann_fpath
    if ut.checkpath(flann_fpath, verbose=verbose):
        idx2_vec = nnindexer.idx2_vec
        # Warning: Loading a FLANN index with old headers may silently fail.
        try:
            nnindexer.flann.load_index(flann_fpath, idx2_vec)
        except (IOError, pyflann.FLANNException) as ex:
            ut.printex(ex, '... cannot load nnindex flann', iswarning=True)
        else:
            load_success = True
    return load_success
Example #39
def ensure_text(fname,
                text,
                repo_dpath='.',
                force=None,
                locals_={},
                chmod=None):
    """
    Args:
        fname (str):  file name
        text (str):
        repo_dpath (str):  directory path string(default = '.')
        force (bool): (default = False)
        locals_ (dict): (default = {})

    Example:
        >>> # DISABLE_DOCTEST
        >>> from utool.util_git import *  # NOQA
        >>> import utool as ut
        >>> result = setup_repo()
        >>> print(result)
    """
    import utool as ut
    ut.colorprint('Ensuring fname=%r' % (fname), 'yellow')

    if force is None and ut.get_argflag('--force-%s' % (fname, )):
        force = True

    fpath = join(repo_dpath, fname)
    if force or not ut.checkpath(fpath, verbose=2, n=5):
        text_ = ut.remove_codeblock_syntax_sentinals(text)
        fmtkw = locals_.copy()
        fmtkw['fname'] = fname
        text_ = text_.format(**fmtkw) + '\n'
        ut.writeto(fpath, text_)
        try:
            if chmod:
                ut.chmod(fpath, chmod)
        except Exception as ex:
            ut.printex(ex, iswarning=True)
Example #40
    def publish(dstcnvs_normer, cachedir=None):
        """
        Sets this as the default normalizer available for download
        ONLY DEVELOPERS CAN PERFORM THIS OPERATION

        Args:
            cachedir (str):

        CommandLine:
            python -m ibeis.algo.hots.distinctiveness_normalizer --test-publish

        Example:
            >>> # DISABLE_DOCTEST
            >>> from ibeis.algo.hots.distinctiveness_normalizer import *  # NOQA
            >>> dstcnvs_normer = testdata_distinctiveness()[0]
            >>> dstcnvs_normer.rebuild()
            >>> dstcnvs_normer.save()
            >>> result = dstcnvs_normer.publish(cachedir)
            >>> # verify results
            >>> print(result)
        """
        from os.path import basename, join
        assert ut.is_developer(), 'ONLY DEVELOPERS CAN PERFORM THIS OPERATION'
        cachedir      = dstcnvs_normer.cachedir if cachedir is None else cachedir
        archive_fpath = dstcnvs_normer.archive(cachedir, overwrite=True)
        archive_fname = basename(archive_fpath)
        publish_dpath = PUBLISH_DIR
        publish_fpath = join(publish_dpath, archive_fname)
        if ut.checkpath(publish_fpath, verbose=True):
            print('Overwriting model')
            print('old nBytes(publish_fpath) = %s' %
                  (ut.get_file_nBytes_str(publish_fpath),))
            print('new nBytes(archive_fpath) = %s' %
                  (ut.get_file_nBytes_str(archive_fpath),))
        else:
            print('Publishing model')
        print('publish_fpath = %r' % (publish_fpath,))
        ut.copy(archive_fpath, publish_fpath)
Example #41
def read_thumb_size(thumb_path):
    import vtool as vt

    if VERBOSE_THUMB:
        print('[ThumbDelegate] Reading thumb size')
    # npimg = vt.imread(thumb_path, delete_if_corrupted=True)
    # (height, width) = npimg.shape[0:2]
    # del npimg
    try:
        width, height = vt.open_image_size(thumb_path)
    except IOError as ex:
        if ut.checkpath(thumb_path, verbose=True):
            ut.printex(
                ex,
                'image=%r seems corrupted. Needs deletion' % (thumb_path, ),
                iswarning=True,
            )
            ut.delete(thumb_path)
        else:
            ut.printex(ex,
                       'image=%r does not exist' % (thumb_path, ),
                       iswarning=True)
        raise
    return width, height
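vt.open_image_size presumably reads the dimensions from the image header without decoding pixels (the commented-out vt.imread branch shows the expensive alternative). A PIL-based sketch, assuming that behavior:

from PIL import Image

def open_image_size(image_fpath):
    # PIL parses only the header here; no pixel data is decoded.
    with Image.open(image_fpath) as pil_img:
        return pil_img.size  # (width, height)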
Example #42
def load_oxford_2007():
    """
    Loads data from
    http://www.robots.ox.ac.uk:5000/~vgg/publications/2007/Philbin07/philbin07.pdf

    >>> from wbia.algo.smk.script_smk import *  # NOQA
    """
    from os.path import join, basename, splitext
    import pandas as pd
    import vtool as vt

    dbdir = ut.truepath('/raid/work/Oxford/')
    data_fpath0 = join(dbdir, 'data_2007.pkl')

    if ut.checkpath(data_fpath0):
        data = ut.load_data(data_fpath0)
        return data
    else:
        word_dpath = join(dbdir, 'word_oxc1_hesaff_sift_16M_1M')
        _word_fpath_list = ut.ls(word_dpath)
        imgid_to_word_fpath = {
            splitext(basename(word_fpath))[0]: word_fpath
            for word_fpath in _word_fpath_list
        }
        readme_fpath = join(dbdir, 'README2.txt')
        imgid_order = ut.readfrom(readme_fpath).split('\n')[20:-1]

        data_uri_order = [x.replace('oxc1_', '') for x in imgid_order]

        imgid_to_df = {}
        for imgid in ut.ProgIter(imgid_order, label='reading kpts'):
            word_fpath = imgid_to_word_fpath[imgid]
            row_gen = (map(float,
                           line.strip('\n').split(' '))
                       for line in ut.read_lines_from(word_fpath)[2:])
            rows = [(int(word_id), x, y, e11, e12, e22)
                    for (word_id, x, y, e11, e12, e22) in row_gen]
            df = pd.DataFrame(
                rows, columns=['word_id', 'x', 'y', 'e11', 'e12', 'e22'])
            imgid_to_df[imgid] = df

        df_list = ut.take(imgid_to_df, imgid_order)

        nfeat_list = [len(df_) for df_ in df_list]
        offset_list = [0] + ut.cumsum(nfeat_list)
        shape = (offset_list[-1], 128)
        # shape = (16334970, 128)
        sift_fpath = join(dbdir, 'OxfordSIFTDescriptors',
                          'feat_oxc1_hesaff_sift.bin')
        try:
            file_ = open(sift_fpath, 'rb')
            with ut.Timer('Reading SIFT binary file'):
                nbytes = np.prod(shape)
                all_vecs = np.fromstring(file_.read(nbytes), dtype=np.uint8)
            all_vecs = all_vecs.reshape(shape)
        finally:
            file_.close()

        kpts_list = [
            df_.loc[:, ('x', 'y', 'e11', 'e12', 'e22')].values
            for df_ in df_list
        ]
        wordid_list = [df_.loc[:, 'word_id'].values for df_ in df_list]
        kpts_Z = np.vstack(kpts_list)
        idx_to_wx = np.hstack(wordid_list)

        # assert len(np.unique(idx_to_wx)) == 1E6

        # Required standard query order
        query_files = sorted(
            ut.glob(dbdir + '/oxford_groundtruth', '*_query.txt'))
        query_uri_order = []
        for qpath in query_files:
            text = ut.readfrom(qpath, verbose=0)
            query_uri = text.split(' ')[0].replace('oxc1_', '')
            query_uri_order.append(query_uri)

        logger.info('converting to invV')
        all_kpts = vt.convert_kptsZ_to_kpts(kpts_Z)

        data = {
            'offset_list': offset_list,
            'all_kpts': all_kpts,
            'all_vecs': all_vecs,
            'idx_to_wx': idx_to_wx,
            'data_uri_order': data_uri_order,
            'query_uri_order': query_uri_order,
        }
        ut.save_data(data_fpath0, data)
    return data
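np.fromstring is deprecated for binary data in modern NumPy; np.fromfile reads the same packed uint8 descriptor block directly from the path. A sketch of the SIFT read with that replacement:

import numpy as np

def read_packed_uint8_vecs(bin_fpath, num_rows, num_cols=128):
    """Read a dense uint8 matrix stored as raw bytes on disk."""
    all_vecs = np.fromfile(bin_fpath, dtype=np.uint8,
                           count=num_rows * num_cols)
    return all_vecs.reshape(num_rows, num_cols)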
Example #43
def install_wildbook(verbose=ut.NOT_QUIET):
    """
    Script to setup wildbook on a unix based system
    (hopefully eventually this will generalize to win32)

    CommandLine:
        # Reset
        ibeis purge_local_wildbook
        ibeis ensure_wb_mysql
        ibeis ensure_local_war
        # Setup
        ibeis install_wildbook
        # ibeis install_wildbook --nomysql
        # Startup
        ibeis startup_wildbook_server --show

        Alternates:
            ibeis install_wildbook --redownload-war
            ibeis install_wildbook --assets
            ibeis startup_wildbook_server --show

    Example:
        >>> # SCRIPT
        >>> from ibeis.control.wildbook_manager import *  # NOQA
        >>> verbose = True
        >>> result = install_wildbook()
        >>> print(result)
    """
    import requests
    # Ensure that the war file has been unpacked
    tomcat_dpath, webapps_dpath, wb_target = ensure_local_war()

    unpacked_war_dpath = join(webapps_dpath, wb_target)
    tomcat_startup_dir = get_tomcat_startup_tmpdir()
    fresh_install = not ut.checkpath(unpacked_war_dpath, verbose=verbose)
    if fresh_install:
        # Need to make sure you start catalina in the same directory,
        # otherwise the derby database gets put in the cwd
        with ut.ChdirContext(tomcat_startup_dir):
            # Starting and stopping catalina should be sufficient to unpack
            # the war
            startup_fpath = join(tomcat_dpath, 'bin', 'startup.sh')
            #shutdown_fpath = join(tomcat_dpath, 'bin', 'shutdown.sh')
            ut.cmd(ut.quote_single_command(startup_fpath))
            print('It is NOT ok if the startup.sh fails\n')

            # wait for the war to be unpacked
            for retry_count in range(0, 6):
                time.sleep(1)
                if ut.checkpath(unpacked_war_dpath, verbose=True):
                    break
                else:
                    print('Retrying')

            # ensure that the server is running
            print('Checking if we can ping the server')
            response = requests.get('http://localhost:8080')
            if response is None or response.status_code != 200:
                print('There may be an error starting the server')
            else:
                print('Seem able to ping the server')

            # assert that the war was unpacked
            ut.assertpath(unpacked_war_dpath, (
                'Wildbook war might have not unpacked correctly.  This may '
                'be ok. Try again. If it fails a second time, then there is a '
                'problem.'),
                          verbose=True)

            # Don't shutdown just yet. Need to create assets

    update_wildbook_install_config(webapps_dpath, unpacked_war_dpath)
    asset_flag_fpath = join(tomcat_startup_dir, 'made_assets.flag')

    # Pinging the server to create asset store
    # Ensuring that createAssetStore exists
    if not ut.checkpath(asset_flag_fpath):
        if not fresh_install:
            startup_wildbook_server()
        #web_url = startup_wildbook_server(verbose=False)
        print('Creating asset store')
        wb_url = 'http://localhost:8080/' + wb_target
        response = requests.get(wb_url + '/createAssetStore.jsp')
        if response is None or response.status_code != 200:
            print('There may be an error starting the server')
            #if response.status_code == 500:
            print(response.text)
            assert False, 'response error'
        else:
            print('Created asset store')
            # Create file signaling we did this
            ut.writeto(asset_flag_fpath, 'True')
        shutdown_wildbook_server(verbose=False)
        print('It is ok if the shutdown fails')
    elif fresh_install:
        shutdown_wildbook_server(verbose=False)

    #127.0.0.1:8080/wildbook_data_dir/test.txt
    print('Wildbook is installed and waiting to be started')
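The unpack-wait loop above is a generic poll-until-a-path-exists pattern; factored out it looks like this (a sketch, not part of the wildbook manager):

import os
import time

def wait_for_path(dpath, num_retries=6, delay=1.0):
    """Poll until ``dpath`` exists; True on success, False on timeout."""
    for _ in range(num_retries):
        if os.path.exists(dpath):
            return True
        time.sleep(delay)
    return False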
Example #44
def ensure_smaller_testingdbs():
    """
    Makes the smaller test databases
    """
    def make_testdb0():
        """ makes testdb0 """
        def get_test_gpaths(ndata=None, names=None, **kwargs):
            # Read ndata from args or command line
            """ DEPRICATE """
            ndata_arg = ut.get_argval(
                '--ndata',
                type_=int,
                default=None,
                help_='use --ndata to specify bigger data')
            if ndata_arg is not None:
                ndata = ndata_arg
            imgdir = get_testdata_dir(**kwargs)
            gpath_list = sorted(
                list(ut.list_images(imgdir, full=True, recursive=True)))
            # Get only the gpaths of certain names
            if names is not None:
                gpath_list = [
                    gpath for gpath in gpath_list
                    if ut.basename_noext(gpath) in names
                ]
            # Get some number of test images
            if ndata is not None:
                gpath_cycle = cycle(gpath_list)
                if six.PY2:
                    gpath_list = [gpath_cycle.next() for _ in range(ndata)]
                else:
                    gpath_list = [next(gpath_cycle) for _ in range(ndata)]
            return gpath_list

        workdir = ibeis.sysres.get_workdir()
        TESTDB0 = join(workdir, 'testdb0')
        main_locals = ibeis.main(dbdir=TESTDB0, gui=False, allow_newdir=True)
        ibs = main_locals['ibs']
        assert ibs is not None, str(main_locals)
        gpath_list = list(map(ut.unixpath, get_test_gpaths()))
        #print('[RESET] gpath_list=%r' % gpath_list)
        gid_list = ibs.add_images(gpath_list)  # NOQA
        valid_gids = ibs.get_valid_gids()
        valid_aids = ibs.get_valid_aids()
        try:
            assert len(valid_aids) == 0, (
                'there are more than 0 annotations in an empty database!')
        except Exception as ex:
            ut.printex(ex, key_list=['valid_aids'])
            raise
        gid_list = valid_gids[0:1]
        bbox_list = [(0, 0, 100, 100)]
        aid = ibs.add_annots(gid_list, bbox_list=bbox_list)[0]
        #print('[RESET] NEW RID=%r' % aid)
        aids = ibs.get_image_aids(gid_list)[0]
        try:
            assert aid in aids, ('bad annotation adder: aid = %r, aids = %r' %
                                 (aid, aids))
        except Exception as ex:
            ut.printex(ex, key_list=['aid', 'aids'])
            raise

    get_testdata_dir(True)
    if not ut.checkpath(join(ibeis.sysres.get_workdir(), 'testdb0'),
                        verbose=True):
        print("\n\nMAKE TESTDB0\n\n")
        make_testdb0()
    if not ut.checkpath(join(ibeis.sysres.get_workdir(), 'testdb1'),
                        verbose=True):
        print("\n\nMAKE TESTDB1\n\n")
        ingest_database.ingest_standard_database('testdb1')
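`get_test_gpaths` repeats a short image list out to the requested `ndata` length by cycling it. The same pattern in isolation, with hypothetical paths:

from itertools import cycle, islice

paths = ['a.jpg', 'b.jpg', 'c.jpg']
ndata = 5
# cycle the list and take exactly ndata items
sampled = list(islice(cycle(paths), ndata))
assert sampled == ['a.jpg', 'b.jpg', 'c.jpg', 'a.jpg', 'b.jpg']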
Example #45
0
def _get_models(ibs,
                species,
                modeldir='default',
                cfg_override=True,
                verbose=VERBOSE_RF):
    r"""
    Args:
        ibs (IBEISController):  wbia controller object
        species (?):
        modeldir (str): (default = 'default')
        cfg_override (bool): (default = True)
        verbose (bool):  verbosity flag(default = False)

    Returns:
        ?: fpath_list

    CommandLine:
        python -m wbia.algo.detect.randomforest --test-_get_models

    Example:
        >>> # ENABLE_DOCTEST
        >>> from wbia.algo.detect.randomforest import *  # NOQA
        >>> import wbia
        >>> ibs = wbia.opendb(defaultdb='testdb1')
        >>> species = wbia.const.TEST_SPECIES.ZEB_PLAIN
        >>> modeldir = 'default'
        >>> cfg_override = True
        >>> verbose = False
        >>> fpath_list = _get_models(ibs, species, modeldir, cfg_override, verbose)
        >>> result = ('fpath_list = %s' % (str(fpath_list),))
        >>> print(result)
    """
    # with ut.embed_on_exception_context:
    if cfg_override and len(ibs.cfg.detect_cfg.trees_path) > 0:
        trees_path = ibs.cfg.detect_cfg.trees_path
    else:
        # Ensure all models downloaded and accounted for
        assert (species is not None
                ), '[_get_models] Cannot detect without specifying a species'
        grabmodels.ensure_models(modeldir=modeldir, verbose=verbose)
        trees_path = grabmodels.get_species_trees_paths(species,
                                                        modeldir=modeldir)
    # Load tree paths
    if ut.checkpath(trees_path, verbose=verbose):
        fpath_list = ut.ls(trees_path, '*.txt')
        # direct = Directory(trees_path, include_extensions=['txt'])
        # files = direct.files()
    else:
        # If the models do not exist, return None
        fpath_list = None
    if fpath_list is None or len(fpath_list) == 0:
        msg = (ut.codeblock("""
            [_get_models] Error loading trees, either directory or fpath_list not found
              * trees_path = %r
              * fpath_list = %r
              * species = %r
              * modeldir = %r
              * cfg_override = %r
            """) % (trees_path, fpath_list, species, modeldir, cfg_override))
        raise AssertionError(msg)
    return fpath_list
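The tail of `_get_models` is a "list matching files or fail with context" step. A standard-library sketch of that shape; the `ls_or_raise` helper is hypothetical, not utool's API:

import glob
import os

def ls_or_raise(dpath, pattern='*.txt'):
    # Return matching file paths, raising with context when none are found
    fpath_list = sorted(glob.glob(os.path.join(dpath, pattern)))
    if len(fpath_list) == 0:
        raise AssertionError(
            'no files matching %r under dpath=%r' % (pattern, dpath))
    return fpath_list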
Example #46
0
def ensure_pz_mtest_batchworkflow_test():
    r"""
    CommandLine:
        python -m ibeis.init.sysres --test-ensure_pz_mtest_batchworkflow_test
        python -m ibeis.init.sysres --test-ensure_pz_mtest_batchworkflow_test --reset

    Example:
        >>> # SCRIPT
        >>> from ibeis.init.sysres import *  # NOQA
        >>> ensure_pz_mtest_batchworkflow_test()
    """
    import ibeis
    ibeis.ensure_pz_mtest()
    workdir = ibeis.sysres.get_workdir()
    mtest_dbpath = join(workdir, 'PZ_MTEST')

    source_dbdir = mtest_dbpath
    dest_dbdir = join(workdir, 'PZ_BATCH_WORKFLOW_MTEST')

    if ut.get_argflag('--reset'):
        ut.delete(dest_dbdir)

    if ut.checkpath(dest_dbdir):
        return
    else:
        copy_ibeisdb(source_dbdir, dest_dbdir)

    ibs = ibeis.opendb('PZ_BATCH_WORKFLOW_MTEST')
    assert len(ibs.get_valid_aids()) == 119
    assert len(ibs.get_valid_nids()) == 41

    ibs.delete_all_imagesets()

    aid_list = ibs.get_valid_aids()

    unixtime_list = ibs.get_annot_image_unixtimes(aid_list)
    untimed_aids = ut.compress(aid_list, [t == -1 for t in unixtime_list])

    ibs.get_annot_groundtruth(untimed_aids, aid_list)

    aids_list, nid_list = ibs.group_annots_by_name(aid_list)

    hourdiffs_list = ibs.get_name_hourdiffs(nid_list)

    imageset_aids_list = [[] for _ in range(4)]

    imageset_idx = 0

    for hourdiffs, aids in zip(hourdiffs_list, aids_list):
        #import scipy.spatial.distance as spdist
        if len(aids) == 1:
            imageset_aids_list[imageset_idx].extend(aids)
            imageset_idx = (imageset_idx + 1) % len(imageset_aids_list)
        else:
            for chunk in list(ut.ichunks(aids, 2)):
                imageset_aids_list[imageset_idx].extend(chunk)
                imageset_idx = (imageset_idx + 1) % len(imageset_aids_list)

            #import vtool as vt
            #import networkx as netx
            #nodes = list(range(len(aids)))
            #edges_pairs = vt.pdist_argsort(hourdiffs)
            #edge_weights = -hourdiffs[hourdiffs.argsort()]
            #netx_graph = make_netx_graph(edges_pairs, nodes, edge_weights)
            #cut_edges = netx.minimum_edge_cut(netx_graph)
            #netx_graph.remove_edges_from(cut_edges)
            #components = list(netx.connected_components(netx_graph))
            #components = ut.sortedby(components, list(map(len, components)), reverse=True)
            #print(components)
            #imageset_aids_list[0].extend(components[0])
            #for component in components:

            # TODO do max-nway cut
        #day_diffs = spdist.squareform(hourdiffs) / 24.0
        #print(ut.numpy_str(day_diffs, precision=2, suppress_small=True))
        #import itertools
        #compare_idxs = [(r, c) for r, c in itertools.product(range(len(aids)), range(len(aids))) if (c > r)]
        #print(len(aids))
    #def make_netx_graph(edges_pairs, nodes=None, edge_weights=None):
    #    import networkx as netx
    #    node_lbls = [('id_', 'int')]

    #    edge_lbls = [('weight', 'float')]
    #    edges = [(pair[0], pair[1], weight) for pair, weight in zip(edges_pairs, edge_weights)]

    #    print('make_netx_graph')
    #    # Make a graph between the chips
    #    netx_nodes = [(ntup[0], {key[0]: val for (key, val) in zip(node_lbls, ntup[1:])})
    #                  for ntup in iter(zip(nodes))]

    #    netx_edges = [(etup[0], etup[1], {key[0]: val for (key, val) in zip(edge_lbls, etup[2:])})
    #                  for etup in iter(edges)]
    #    netx_graph = netx.Graph()
    #    netx_graph.add_nodes_from(netx_nodes)
    #    netx_graph.add_edges_from(netx_edges)
    #    return netx_graph

    # Group into imagesets based on old names
    gids_list = ibs.unflat_map(ibs.get_annot_image_rowids, imageset_aids_list)
    imgsetid_list = ibs.new_imagesets_from_images(gids_list)  # NOQA

    # Remove all names
    ibs.delete_annot_nids(aid_list)
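The imageset assignment above deals chunks of annotations into a fixed number of buckets in rotation. The same round-robin pattern on plain lists (names hypothetical):

def round_robin_chunks(items, num_buckets=4, chunksize=2):
    # Deal fixed-size chunks of items into num_buckets in rotation
    buckets = [[] for _ in range(num_buckets)]
    chunks = [items[i:i + chunksize] for i in range(0, len(items), chunksize)]
    for idx, chunk in enumerate(chunks):
        buckets[idx % num_buckets].extend(chunk)
    return buckets

# round_robin_chunks(list(range(10)))
# -> [[0, 1, 8, 9], [2, 3], [4, 5], [6, 7]]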
Example #47
0
    # FIX THIS TO POINT TO THE CORRECT DIRECTORY
    # prefix = expanduser(join('~', 'Desktop'))
    prefix = '/Volumes/EXTERNAL/BACKUPS/Dan_2014-03-26_Ol_Pejeta__100GB/Ol_pejeta_zebra_stuff__2GB/'

    print("""
          =====================
          PROCESSING ACTIVITIES
          =====================
          """)
    activities = {}
    columns = [3, 9, 10, 11, 12, 13, 14, 15]
    csv_fpath = join(prefix, 'OPC Zebra database all [exported]', 'csv')
    activity_csv_fpath = join(csv_fpath, 'Group-Habitat-Activity table.csv')
    exportedmdb_fpath = join(csv_fpath, 'Individual sightings.csv')

    utool.checkpath(activity_csv_fpath, verbose=True)
    utool.checkpath(exportedmdb_fpath, verbose=True)

    with open(activity_csv_fpath, 'r') as file_:
        lines = file_.read()
        for line in lines.splitlines()[1:]:
            line = [item.strip() for item in line.strip().split(',')]
            _id = line[2]
            if _id not in activities:
                activities[_id] = [line[col] for col in columns]

    originals = join(prefix, 'Ol_pejeta_zebra_photos2__1GB')
    images = Directory(originals)
    image_set = set(images.files())
    print(images)
    exts = []
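The manual comma-splitting above breaks if any field contains a quoted comma; Python's `csv` module handles quoting. A sketch of the same first-seen-wins indexing (column indices copied from the snippet, the file path is a placeholder):

import csv

activities = {}
columns = [3, 9, 10, 11, 12, 13, 14, 15]
with open('Group-Habitat-Activity table.csv', 'r') as file_:
    reader = csv.reader(file_)
    next(reader)  # skip the header row
    for row in reader:
        row = [item.strip() for item in row]
        _id = row[2]
        if _id not in activities:  # keep the first record per id
            activities[_id] = [row[col] for col in columns]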
Example #48
0
def ensure_pz_mtest_mergesplit_test():
    r"""
    Make a test database for MERGE and SPLIT cases

    CommandLine:
        python -m ibeis.init.sysres --test-ensure_pz_mtest_mergesplit_test

    Example:
        >>> # SCRIPT
        >>> from ibeis.init.sysres import *  # NOQA
        >>> ensure_pz_mtest_mergesplit_test()
    """
    import ibeis
    ibeis.ensure_pz_mtest()
    workdir = ibeis.sysres.get_workdir()
    mtest_dbpath = join(workdir, 'PZ_MTEST')

    source_dbdir = mtest_dbpath
    dest_dbdir = join(workdir, 'PZ_MERGESPLIT_MTEST')

    if ut.get_argflag('--reset'):
        ut.delete(dest_dbdir)
    if ut.checkpath(dest_dbdir):
        return

    copy_ibeisdb(source_dbdir, dest_dbdir)

    ibs = ibeis.opendb('PZ_MERGESPLIT_MTEST')
    assert len(ibs.get_valid_aids()) == 119
    assert len(ibs.get_valid_nids()) == 41

    aid_list = ibs.get_valid_aids()
    aids_list, nid_list = ibs.group_annots_by_name(aid_list)
    num_aids = list(map(len, aids_list))

    # num cases wanted
    num_merge = 3
    num_split = 1
    num_combo = 1

    # num inputs needed
    num_merge_names = num_merge
    num_split_names = num_split * 2
    num_combo_names = num_combo * 3

    total_names = num_merge_names + num_split_names + num_combo_names

    modify_aids = ut.take(aids_list, ut.list_argsort(num_aids, reverse=True)[0:total_names])

    merge_nids1 = ibs.make_next_nids(num_merge, location_text='XMERGE')
    merge_nids2 = ibs.make_next_nids(num_merge, location_text='XMERGE')
    split_nid = ibs.make_next_nids(num_split, location_text='XSPLIT')[0]
    combo_nids = ibs.make_next_nids(num_combo * 2, location_text='XCOMBO')

    # the first 3 become merge cases
    #left = 0
    #right = left + num_merge
    for aids, nid1, nid2 in zip(modify_aids[0:3], merge_nids1, merge_nids2):
        #ibs.get_annot_nids(aids)
        # split each name's annots between two new names to create a merge case
        aids_even = aids[::2]
        aids_odd = aids[1::2]
        ibs.set_annot_name_rowids(aids_even, [nid1] * len(aids_even))
        ibs.set_annot_name_rowids(aids_odd, [nid2] * len(aids_odd))

    # the next 2 names are merged into one nid to become a split case
    #left = right
    #right = left + num_split_names
    for aids in modify_aids[3:5]:
        ibs.set_annot_name_rowids(aids, [split_nid] * len(aids))

    #left = right
    #right = left + num_combo_names
    # The final 3 are a combination case
    for aids in modify_aids[5:8]:
        aids_even = aids[::2]
        aids_odd = aids[1::2]
        ibs.set_annot_name_rowids(aids_even, [combo_nids[0]] * len(aids_even))
        ibs.set_annot_name_rowids(aids_odd, [combo_nids[1]] * len(aids_odd))

    final_result = ibs.unflat_map(ibs.get_annot_nids, modify_aids)
    print('final_result = %s' % (ut.list_str(final_result),))
Example #49
0
def get_data_list():
    r"""
    CommandLine:
        python ~/code/ibeis/_installers/ibeis_pyinstaller_data_helper.py --test-get_data_list

    Example:
        >>> # ENABLE_DOCTEST
        >>> from ibeis_pyinstaller_data_helper import *  # NOQA
        >>> result = get_data_list()
        >>> DATATUP_LIST, BINARYTUP_LIST, iconfile = result
        >>> print('DATATUP_LIST = ' + ut.list_str(DATATUP_LIST))
        >>> print('BINARYTUP_LIST = ' + ut.list_str(BINARYTUP_LIST))
        >>> print(len(DATATUP_LIST))
        >>> print(len(BINARYTUP_LIST))
        >>> print(iconfile)

    """
    # Build data before running analysis for quick debugging
    DATATUP_LIST = []
    BINARYTUP_LIST = []

    #import pyhesaff
    #pyhesaff.HESAFF_CLIB.__LIB_FPATH__
    #import pyrf
    #pyrf.RF_CLIB.__LIB_FPATH__
    # Hesaff
    libhesaff_fname = 'libhesaff' + LIB_EXT
    libhesaff_src = realpath(
        join(root_dir, '..', 'hesaff', 'pyhesaff', libhesaff_fname))
    libhesaff_dst = join(ibsbuild, 'pyhesaff', 'lib', libhesaff_fname)
    DATATUP_LIST.append((libhesaff_dst, libhesaff_src))

    # PyRF
    libpyrf_fname = 'libpyrf' + LIB_EXT
    libpyrf_src = realpath(join(root_dir, '..', 'pyrf', 'pyrf', libpyrf_fname))
    libpyrf_dst = join(ibsbuild, 'pyrf', 'lib', libpyrf_fname)
    DATATUP_LIST.append((libpyrf_dst, libpyrf_src))

    # FLANN
    libflann_fname = 'libflann' + LIB_EXT
    #try:
    #    #import pyflann
    #    #pyflann.__file__
    #    #join(dirname(dirname(pyflann.__file__)), 'build')
    #except ImportError as ex:
    #    print('PYFLANN IS NOT IMPORTABLE')
    #    raise
    #if WIN32 or LINUX:
    # FLANN
    #libflann_src = join_SITE_PACKAGES('pyflann', 'lib', libflann_fname)
    #libflann_dst = join(ibsbuild, libflann_fname)
    #elif APPLE:
    #    # libflann_src = '/pyflann/lib/libflann.dylib'
    #    # libflann_dst = join(ibsbuild, libflann_fname)
    #    libflann_src = join_SITE_PACKAGES('pyflann', 'lib', libflann_fname)
    #    libflann_dst = join(ibsbuild, libflann_fname)
    # This path is when pyflann was built using setup.py develop
    libflann_src = realpath(
        join(root_dir, '..', 'flann', 'build', 'lib', libflann_fname))
    libflann_dst = join(ibsbuild, 'pyflann', 'lib', libflann_fname)
    DATATUP_LIST.append((libflann_dst, libflann_src))

    # VTool
    vtool_libs = ['libsver']
    for libname in vtool_libs:
        lib_fname = libname + LIB_EXT
        vtlib_src = realpath(join(root_dir, '..', 'vtool', 'vtool', lib_fname))
        vtlib_dst = join(ibsbuild, 'vtool', lib_fname)
        DATATUP_LIST.append((vtlib_dst, vtlib_src))

    linux_lib_dpaths = [
        '/usr/lib/x86_64-linux-gnu', '/usr/lib', '/usr/local/lib'
    ]

    # OpenMP
    if APPLE:
        # BSDDB, Fix for the modules that PyInstaller needs and (for some reason)
        # are not being added by PyInstaller
        libbsddb_src = '/opt/local/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/lib-dynload/_bsddb.so'
        libbsddb_dst = join(ibsbuild, '_bsddb.so')
        DATATUP_LIST.append((libbsddb_dst, libbsddb_src))
        #libgomp_src = '/opt/local/lib/libgomp.dylib'
        libgomp_src = '/opt/local/lib/gcc48/libgomp.dylib'
        BINARYTUP_LIST.append(('libgomp.1.dylib', libgomp_src, 'BINARY'))

        # very hack
        libiomp_src = '/Users/bluemellophone/code/libomp_oss/exports/mac_32e/lib.thin/libiomp5.dylib'
        BINARYTUP_LIST.append(('libiomp5.dylib', libiomp_src, 'BINARY'))

    if LINUX:
        libgomp_src = ut.search_in_dirs('libgomp.so.1', linux_lib_dpaths)
        ut.assertpath(libgomp_src)
        BINARYTUP_LIST.append(('libgomp.so.1', libgomp_src, 'BINARY'))

    # MinGW
    if WIN32:
        mingw_root = r'C:\MinGW\bin'
        mingw_dlls = [
            'libgcc_s_dw2-1.dll', 'libstdc++-6.dll', 'libgomp-1.dll',
            'pthreadGC2.dll'
        ]
        for lib_fname in mingw_dlls:
            lib_src = join(mingw_root, lib_fname)
            lib_dst = join(ibsbuild, lib_fname)
            DATATUP_LIST.append((lib_dst, lib_src))

    # We need to add these opencv libraries because pyinstaller does not find them.
    #OPENCV_EXT = {'win32': '248.dll',
    #              'darwin': '.2.4.dylib',
    #              'linux2': '.so.2.4'}[PLATFORM]

    target_cv_version = '3.0.0'

    OPENCV_EXT = {
        'win32': target_cv_version.replace('.', '') + '.dll',
        'darwin': '.' + target_cv_version + '.dylib',
        'linux2': '.so.' + target_cv_version
    }[PLATFORM]

    missing_cv_name_list = [
        'libopencv_videostab',
        'libopencv_superres',
        'libopencv_stitching',
        #'libopencv_gpu',
        'libopencv_core',
        'libopencv_highgui',
        'libopencv_imgproc',
    ]
    # Hack to find the appropriate opencv libs
    for name in missing_cv_name_list:
        fname = name + OPENCV_EXT
        src = ''
        dst = ''
        if APPLE:
            src = join('/opt/local/lib', fname)
        elif LINUX:
            #src = join('/usr/lib', fname)
            src, tried = ut.search_in_dirs(fname,
                                           linux_lib_dpaths,
                                           strict=True,
                                           return_tried=True)
        elif WIN32:
            if ut.get_computer_name() == 'Ooo':
                src = join(r'C:/Program Files (x86)/OpenCV/x86/mingw/bin',
                           fname)
            else:
                src = join(root_dir, '../opencv/build/bin', fname)
        dst = join(ibsbuild, fname)
        # ut.assertpath(src)
        DATATUP_LIST.append((dst, src))

    ##################################
    # QT Gui dependencies
    ##################################
    if APPLE:
        walk_path = '/opt/local/Library/Frameworks/QtGui.framework/Versions/4/Resources/qt_menu.nib'
        for root, dirs, files in os.walk(walk_path):
            for lib_fname in files:
                toc_src = join(walk_path, lib_fname)
                toc_dst = join('qt_menu.nib', lib_fname)
                DATATUP_LIST.append((toc_dst, toc_src))

    ##################################
    # Documentation, Icons, and Web Assets
    ##################################
    # Documentation
    #userguide_dst = join('.', '_docs', 'IBEISUserGuide.pdf')
    #userguide_src = join(root_dir, '_docs', 'IBEISUserGuide.pdf')
    #DATATUP_LIST.append((userguide_dst, userguide_src))

    # Icon File
    ICON_EXT = {'darwin': '.icns', 'win32': '.ico', 'linux2': '.ico'}[PLATFORM]
    iconfile = join('_installers', 'ibsicon' + ICON_EXT)
    icon_src = join(root_dir, iconfile)
    icon_dst = join(ibsbuild, iconfile)
    DATATUP_LIST.append((icon_dst, icon_src))

    print('[installer] Checking Data (preweb)')
    try:
        for (dst, src) in DATATUP_LIST:
            assert ut.checkpath(
                src, verbose=True), 'checkpath for src=%r failed' % (src, )
    except Exception as ex:
        ut.printex(
            ex,
            'Checking data failed DATATUP_LIST=' + ut.list_str(DATATUP_LIST))
        raise

    # Web Assets
    INSTALL_WEB = True and not ut.get_argflag('--noweb')
    if INSTALL_WEB:
        web_root = join('ibeis', 'web/')
        #walk_path = join(web_root, 'static')
        #static_data = []
        #for root, dirs, files in os.walk(walk_path):
        #    root2 = root.replace(web_root, '')
        #    for icon_fname in files:
        #        if '.DS_Store' not in icon_fname:
        #            toc_src = join(abspath(root), icon_fname)
        #            toc_dst = join(root2, icon_fname)
        #            static_data.append((toc_dst, toc_src))
        #ut.get_list_column(static_data, 1) == ut.glob(walk_path, '*', recursive=True, with_dirs=False, exclude_dirs=['.DS_Store'])
        static_src_list = ut.glob(join(web_root, 'static'),
                                  '*',
                                  recursive=True,
                                  with_dirs=False,
                                  exclude_dirs=['.DS_Store'])
        static_dst_list = [
            relpath(src, join(root_dir, 'ibeis')) for src in static_src_list
        ]
        static_data = zip(static_dst_list, static_src_list)
        DATATUP_LIST.extend(static_data)

        #walk_path = join(web_root, 'templates')
        #template_data = []
        #for root, dirs, files in os.walk(walk_path):
        #    root2 = root.replace(web_root, '')
        #    for icon_fname in files:
        #        if '.DS_Store' not in icon_fname:
        #            toc_src = join(abspath(root), icon_fname)
        #            toc_dst = join(root2, icon_fname)
        #            template_data.append((toc_dst, toc_src))
        template_src_list = ut.glob(join(web_root, 'templates'),
                                    '*',
                                    recursive=True,
                                    with_dirs=False,
                                    exclude_dirs=['.DS_Store'])
        template_dst_list = [
            relpath(src, join(root_dir, 'ibeis')) for src in template_src_list
        ]
        template_data = zip(template_dst_list, template_src_list)
        DATATUP_LIST.extend(template_data)

    print('[installer] Checking Data (postweb)')
    try:
        for (dst, src) in DATATUP_LIST:
            assert ut.checkpath(
                src, verbose=False), 'checkpath for src=%r failed' % (src, )
    except Exception as ex:
        ut.printex(
            ex,
            'Checking data failed DATATUP_LIST=' + ut.list_str(DATATUP_LIST))
        raise

    return DATATUP_LIST, BINARYTUP_LIST, iconfile
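The pre-web and post-web checks in `get_data_list` are the same loop with different verbosity; a small helper would make both one-liners. A sketch under that assumption (`assert_sources_exist` is hypothetical):

import os

def assert_sources_exist(datatup_list):
    # Every (dst, src) tuple must name an existing src file
    missing = [src for dst, src in datatup_list if not os.path.exists(src)]
    if missing:
        raise AssertionError('missing %d sources: %r' % (len(missing), missing))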
Example #50
0
def ensure_flann(dstcnvs_normer, cachedir=None):
    if not ut.checkpath(dstcnvs_normer.get_flann_fpath(cachedir)):
        dstcnvs_normer.rebuild(cachedir)
        dstcnvs_normer.save_flann(cachedir)
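`ensure_flann` is the usual build-if-missing cache idiom: check the path, rebuild, save. The same shape with plain pickle, independent of the normalizer object (names hypothetical):

import os
import pickle

def ensure_cached(fpath, compute_fn):
    # Load fpath if it exists; otherwise compute, save, and return
    if os.path.exists(fpath):
        with open(fpath, 'rb') as file_:
            return pickle.load(file_)
    data = compute_fn()
    with open(fpath, 'wb') as file_:
        pickle.dump(data, file_)
    return data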
Example #51
0
def autogenerate_nth_schema_version(schema_spec, n=-1):
    r"""
    dumps, prints, or diffs autogen schema based on command line

    Args:
        schema_spec (module): schema specification module (e.g. DB_SCHEMA)
        n (int): schema version index; -1 means the latest

    CommandLine:
        python -m ibeis.control._sql_helpers --test-autogenerate_nth_schema_version

    Example:
        >>> # DISABLE_DOCTEST
        >>> from ibeis.control._sql_helpers import *  # NOQA
        >>> from ibeis.control import DB_SCHEMA
        >>> # build test data
        >>> schema_spec = DB_SCHEMA
        >>> n = 1
        >>> # execute function
        >>> tablename = autogenerate_nth_schema_version(schema_spec, n)
        >>> # verify results
        >>> result = str(tablename)
        >>> print(result)
    """
    import utool as ut
    print('[_SQL] AUTOGENERATING CURRENT SCHEMA')
    db = get_nth_test_schema_version(schema_spec, n=n)
    # Auto-generate the version skip schema file
    schema_spec_dir, schema_spec_fname = split(schema_spec.__file__)
    schema_spec_fname = splitext(schema_spec_fname)[0]
    # HACK TO GET AUTOGEN COMMAND
    # FIXME: Make this autogen command a bit more sane and not completely
    # coupled with ibeis
    autogen_cmd = ut.codeblock(
        '''
        python -m ibeis.control.{schema_spec_fname} --test-autogen_{funcname} --force-incremental-db-update --write
        python -m ibeis.control.{schema_spec_fname} --test-autogen_{funcname} --force-incremental-db-update --diff=1
        python -m ibeis.control.{schema_spec_fname} --test-autogen_{funcname} --force-incremental-db-update
        '''
    ).format(schema_spec_fname=schema_spec_fname, funcname=schema_spec_fname.lower())
    autogen_text = db.get_schema_current_autogeneration_str(autogen_cmd)

    autogen_fname = '%s_CURRENT.py' % schema_spec_fname
    autogen_fpath = join(schema_spec_dir, autogen_fname)

    dowrite = ut.get_argflag(('-w', '--write', '--dump-autogen-schema'))
    show_diff = ut.get_argflag('--diff')
    num_context_lines = ut.get_argval('--diff', type_=int, default=None)
    show_diff = show_diff or num_context_lines is not None
    dowrite = dowrite and not show_diff

    if dowrite:
        ut.write_to(autogen_fpath, autogen_text)
    else:
        if show_diff:
            if ut.checkpath(autogen_fpath, verbose=True):
                prev_text = ut.read_from(autogen_fpath)
                textdiff = ut.util_str.get_textdiff(prev_text, autogen_text, num_context_lines=num_context_lines)
                ut.print_difftext(textdiff)
        else:
            ut.util_print.print_python_code(autogen_text)
        print('\nL___\n...would write to: %s' % autogen_fpath)

    print(' Run with -n=<index> to get a specific schema version by index. -1 == latest')
    print(' Run with --write to autogenerate latest schema version')
    print(' Run with --diff or --diff=<numcontextlines> to see the difference between current and requested')
    return db
Example #52
0
def update_schema_version(ibs, db, schema_spec, version, version_target,
                          dobackup=True, clearbackup=False):
    """
    version_target = version_expected
    clearbackup = False
    FIXME: AN SQL HELPER FUNCTION SHOULD BE AGNOSTIC TO CONTROLLER OBJECTS
    """
    def _check_superkeys():
        all_tablename_list = db.get_table_names()
        # always ignore the metadata table.
        ignore_tables_ = ['metadata']
        tablename_list = [tablename for tablename in all_tablename_list
                          if tablename not in ignore_tables_]
        for tablename in tablename_list:
            superkey_colnames_list = db.get_table_superkey_colnames(tablename)
            # some tables seem to only have old constraints and aren't
            # properly updated to superkeys... weird.
            old_constraints = db.get_table_constraints(tablename)
            assert len(superkey_colnames_list) > 0 or len(old_constraints) > 0, (
                'ERROR UPDATING DATABASE, SUPERKEYS of %s DROPPED!' % (tablename,))

    print('[_SQL] update_schema_version')
    db_fpath = db.fpath
    if dobackup:
        db_dpath, db_fname = split(db_fpath)
        db_fname_noext, ext = splitext(db_fname)
        db_backup_fname = ''.join((db_fname_noext, '_backup', '_v', version, ext))
        db_backup_fpath = join(db_dpath, db_backup_fname)
        count = 0
        # TODO MAKE UTOOL THAT DOES THIS (there might be one in util_logging)
        while ut.checkpath(db_backup_fpath, verbose=True):
            db_backup_fname = ''.join((db_fname_noext, '_backup', '_v',
                                       version, '_copy', str(count), ext))
            db_backup_fpath = join(db_dpath, db_backup_fname)
            count += 1
        ut.copy(db_fpath, db_backup_fpath)

    legacy_update_funcs = schema_spec.LEGACY_UPDATE_FUNCTIONS
    for legacy_version, func in legacy_update_funcs:
        if compare_string_versions(version, legacy_version) == -1:
            func(db)
    db_versions = schema_spec.VALID_VERSIONS
    valid_versions = sorted(db_versions.keys(), compare_string_versions)
    try:
        start_index = valid_versions.index(version) + 1
    except ValueError:  # list.index raises ValueError for a missing item
        raise AssertionError('[!update_schema_version]'
                             ' The current database version is unknown')
    try:
        end_index = valid_versions.index(version_target) + 1
    except ValueError:  # list.index raises ValueError for a missing item
        raise AssertionError('[!update_schema_version]'
                             ' The target database version is unknown')

    try:
        print('Update path: %r ' % (valid_versions[start_index:end_index]))
        for index in range(start_index, end_index):
            next_version = valid_versions[index]
            print('Updating database to version: %r' % (next_version))
            pre, update, post = db_versions[next_version]
            if pre is not None:
                pre(db, ibs=ibs)
            if update is not None:
                update(db, ibs=ibs)
            if post is not None:
                post(db, ibs=ibs)
            _check_superkeys()
    except Exception as ex:
        if dobackup:
            msg = 'The database update failed, rolled back to the original version.'
            ut.printex(ex, msg, iswarning=True)
            ut.remove_file(db_fpath)
            ut.copy(db_backup_fpath, db_fpath)
            if clearbackup:
                ut.remove_file(db_backup_fpath)
            raise
        else:
            ut.printex(ex, (
                'The database update failed, and no backup was made.'),
                iswarning=False)
            raise
    if dobackup and clearbackup:
        ut.remove_file(db_backup_fpath)
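The backup-naming loop above (flagged with a TODO to move into utool) stands alone as a non-clobbering path helper. A sketch of that extraction; the name is illustrative:

import os

def non_clobbering_path(fpath):
    # Return fpath, or a _copy<N> variant, such that nothing is overwritten
    if not os.path.exists(fpath):
        return fpath
    base, ext = os.path.splitext(fpath)
    count = 0
    while os.path.exists('%s_copy%d%s' % (base, count, ext)):
        count += 1
    return '%s_copy%d%s' % (base, count, ext)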
Example #53
0
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
script to open directory in current window manager
"""
import utool as ut

if __name__ == '__main__':
    import sys
    if len(sys.argv) == 2:
        path = sys.argv[1]
    else:
        path = None
    ut.assertpath(path)
    if ut.checkpath(path, verbose=True):
        ut.view_directory(path)
    #F:\\data\\work\\PZ_MTEST\\_ibsdb\\
Example #54
0
def grab_liberty_siam_dataset(pairs=250000):
    """
    References:
        http://www.cs.ubc.ca/~mbrown/patchdata/patchdata.html
        https://github.com/osdf/datasets/blob/master/patchdata/dataset.py

    Notes:
        "info.txt" contains the match information Each row of info.txt
        corresponds corresponds to a separate patch, with the patches ordered
        from left to right and top to bottom in each bitmap image.

        3 types of metadata files

        info.txt - contains patch ids that correspond with the order of patches
          in the bmp images
          In the format:
              pointid, unused

        interest.txt -
            interest points corresponding to patches with patchids
            has same number of rows as info.txt
            In the format:
                reference image id, x, y, orientation, scale (in log2 units)

        m50_<d>_<d>_0.txt -
             match files
             patchID1  3DpointID1  unused1  patchID2  3DpointID2  unused2

    CommandLine:
        python -m ibeis_cnn.ingest_data --test-grab_liberty_siam_dataset --show

    Example:
        >>> # ENABLE_DOCTEST
        >>> from ibeis_cnn.ingest_data import *  # NOQA
        >>> pairs = 500
        >>> dataset = grab_liberty_siam_dataset(pairs)
        >>> ut.quit_if_noshow()
        >>> from ibeis_cnn import draw_results
        >>> #ibsplugin.rrr()
        >>> flat_metadata = {}
        >>> data, labels = dataset.subset('full')
        >>> ut.quit_if_noshow()
        >>> warped_patch1_list = data[::2]
        >>> warped_patch2_list = data[1::2]
        >>> dataset.interact()
        >>> ut.show_if_requested()
    """
    datakw = {
        'detector': 'dog',
        'pairs': pairs,
    }

    assert datakw['detector'] in ['dog', 'harris']
    assert pairs in [500, 50000, 100000, 250000]

    liberty_urls = {
        'dog': 'http://www.cs.ubc.ca/~mbrown/patchdata/liberty.zip',
        'harris': 'http://www.cs.ubc.ca/~mbrown/patchdata/liberty_harris.zip',
    }
    url = liberty_urls[datakw['detector']]
    ds_path = ut.grab_zipped_url(url)

    ds_name = splitext(basename(ds_path))[0]
    alias_key = 'liberty;' + ut.dict_str(datakw, nl=False, explicit=True)
    cfgstr = ','.join([str(val) for key, val in ut.iteritems_sorted(datakw)])

    # TODO: allow a move of the base data prefix

    training_dpath = ut.ensure_app_resource_dir('ibeis_cnn', 'training',
                                                ds_name)
    if ut.get_argflag('--vtd'):
        ut.vd(training_dpath)
    ut.ensuredir(training_dpath)

    data_fpath = join(training_dpath, 'liberty_data_' + cfgstr + '.pkl')
    labels_fpath = join(training_dpath, 'liberty_labels_' + cfgstr + '.pkl')

    if not ut.checkpath(data_fpath, verbose=True):
        data, labels = ingest_helpers.extract_liberty_style_patches(
            ds_path, pairs)
        ut.save_data(data_fpath, data)
        ut.save_data(labels_fpath, labels)

    # hack for caching num_labels
    labels = ut.load_data(labels_fpath)
    num_labels = len(labels)

    dataset = DataSet.new_training_set(
        alias_key=alias_key,
        data_fpath=data_fpath,
        labels_fpath=labels_fpath,
        metadata_fpath=None,
        training_dpath=training_dpath,
        data_shape=(64, 64, 1),
        data_per_label=2,
        output_dims=1,
        num_labels=num_labels,
    )
    return dataset
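The cache file names above are keyed by a deterministic string built from the config dict, so a changed config never reuses a stale file. The same idea with only the standard library:

datakw = {'detector': 'dog', 'pairs': 500}
# sort by key so the string is stable across runs and dict orderings
cfgstr = ','.join(str(val) for key, val in sorted(datakw.items()))
assert cfgstr == 'dog,500'
data_fname = 'liberty_data_' + cfgstr + '.pkl'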
Example #55
0
def archive_files(archive_fpath, fpath_list, small=True, allowZip64=False,
                  overwrite=False, verbose=True, common_prefix=False):
    r"""
    Adds the files in `fpath_list` to a zip/tar archive.

    Args:
        archive_fpath (str): path to zipfile to create
        fpath_list (list): path of files to add to the zipfile
        small (bool): if True uses compression but the zipfile will take more
            time to write
        allowZip64 (bool): use if a file is over 2GB
        overwrite (bool):
        verbose (bool):  verbosity flag(default = True)
        common_prefix (bool): (default = False)

    References:
        https://docs.python.org/2/library/zipfile.html

    CommandLine:
        python -m utool.util_grabdata --test-archive_files

    Example:
        >>> # SLOW_DOCTEST
        >>> from utool.util_grabdata import *  # NOQA
        >>> import utool as ut
        >>> archive_fpath = ut.get_app_resource_dir('utool', 'testarchive.zip')
        >>> # remove an existing test archive
        >>> ut.delete(archive_fpath)
        >>> assert not exists(archive_fpath), 'archive should not exist'
        >>> fpath_list = [ut.grab_test_imgpath(key) for key in ut.TESTIMG_URL_DICT]
        >>> small = True
        >>> allowZip64 = False
        >>> overwrite = True
        >>> result = archive_files(archive_fpath, fpath_list, small, allowZip64, overwrite)
        >>> # verify results
        >>> print(result)
        >>> assert exists(archive_fpath), 'archive should exist'

    Ignore:
        # http://superuser.com/questions/281573/best-options-compressing-files-7-zip
        # Create a small 7zip archive
        7z a -t7z -m0=lzma -mx=9 -mfb=64 -md=32m -ms=on archive.7z dir1
        7z a -t7z -m0=lzma -mx=9 -mfb=64 -md=32m -ms=on ibeis-linux-binary.7z ibeis

        # Create a small zip archive
        7za a -mm=Deflate -mfb=258 -mpass=15 -r ibeis-linux-binary.zip ibeis

    """
    import utool as ut
    from os.path import relpath, dirname, commonprefix
    if not overwrite and ut.checkpath(archive_fpath, verbose=True):
        raise AssertionError('cannot overwrite archive_fpath=%r' % (archive_fpath,))
    print('Archiving %d files' % len(fpath_list))
    compression = zipfile.ZIP_DEFLATED if small else zipfile.ZIP_STORED
    if common_prefix:
        # Note: common prefix does not care about file structures
        if isinstance(common_prefix, six.string_types):
            # use given path as base path
            rel_arcpath = common_prefix
        else:
            rel_arcpath = commonprefix(fpath_list)
        rel_arcpath = ut.longest_existing_path(rel_arcpath)
    else:
        rel_arcpath = dirname(archive_fpath)
    with zipfile.ZipFile(archive_fpath, 'w', compression, allowZip64) as myzip:
        for fpath in ut.ProgressIter(fpath_list, lbl='archiving files',
                                     enabled=verbose, adjust=True):
            arcname = relpath(fpath, rel_arcpath)
            myzip.write(fpath, arcname)
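The `common_prefix` option only changes the base directory that archive member names are made relative to. A tiny illustration of the arcname computation with hypothetical POSIX paths:

from os.path import commonprefix, relpath

fpath_list = ['/data/project/imgs/a.png', '/data/project/imgs/b.png']
base = commonprefix(fpath_list)  # '/data/project/imgs/'
arcnames = [relpath(fpath, base) for fpath in fpath_list]
assert arcnames == ['a.png', 'b.png']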
Example #56
0
def ggr_random_name_splits():
    """
    CommandLine:
        python -m wbia.viz.viz_graph2 ggr_random_name_splits --show

    Ignore:
        sshfs -o idmap=user lev:/ ~/lev

    Example:
        >>> # DISABLE_DOCTEST
        >>> from wbia.viz.viz_graph2 import *  # NOQA
        >>> ggr_random_name_splits()
    """
    import wbia.guitool as gt

    gt.ensure_qtapp()
    # nid_list = ibs.get_valid_nids(filter_empty=True)
    import wbia

    dbdir = '/media/danger/GGR/GGR-IBEIS'
    dbdir = (dbdir if ut.checkpath(dbdir) else
             ut.truepath('~/lev/media/danger/GGR/GGR-IBEIS'))
    ibs = wbia.opendb(dbdir=dbdir, allow_newdir=False)

    import datetime

    day1 = datetime.date(2016, 1, 30)
    day2 = datetime.date(2016, 1, 31)

    orig_filter_kw = {
        'multiple': None,
        # 'view': ['right'],
        # 'minqual': 'good',
        'is_known': True,
        'min_pername': 2,
    }
    orig_aids = ibs.filter_annots_general(filter_kw=ut.dict_union(
        orig_filter_kw,
        {
            'min_unixtime':
            ut.datetime_to_posixtime(ut.date_to_datetime(day1, 0.0)),
            'max_unixtime':
            ut.datetime_to_posixtime(ut.date_to_datetime(day2, 1.0)),
        },
    ))
    orig_all_annots = ibs.annots(orig_aids)
    orig_unique_nids, orig_grouped_annots_ = orig_all_annots.group(
        orig_all_annots.nids)
    # Ensure we get everything
    orig_grouped_annots = [
        ibs.annots(aids_) for aids_ in ibs.get_name_aids(orig_unique_nids)
    ]

    # pip install quantumrandom
    if False:
        import quantumrandom

        data = quantumrandom.uint16()
        seed = data.sum()
        print('seed = %r' % (seed, ))
        # import Crypto.Random
        # from Crypto import Random
        # quantumrandom.get_data()
        # StrongRandom = Crypto.Random.random.StrongRandom
        # aes.reseed(3340258)
        # chars = [str(chr(x)) for x in data.view(np.uint8)]
        # aes_seed = str('').join(chars)
        # aes = Crypto.Random.Fortuna.FortunaGenerator.AESGenerator()
        # aes.reseed(aes_seed)
        # aes.pseudo_random_data(10)

    orig_rand_idxs = ut.random_indexes(len(orig_grouped_annots), seed=3340258)
    orig_sample_size = 75
    random_annot_groups = ut.take(orig_grouped_annots, orig_rand_idxs)
    orig_annot_sample = random_annot_groups[:orig_sample_size]

    # OOPS: made an error above, redo with the corrected filters ----

    filter_kw = {
        'multiple': None,
        'view': ['right'],
        'minqual': 'good',
        'is_known': True,
        'min_pername': 2,
    }
    filter_kw_ = ut.dict_union(
        filter_kw,
        {
            'min_unixtime':
            ut.datetime_to_posixtime(ut.date_to_datetime(day1, 0.0)),
            'max_unixtime':
            ut.datetime_to_posixtime(ut.date_to_datetime(day2, 1.0)),
        },
    )
    refiltered_sample = [
        ibs.filter_annots_general(annot.aids, filter_kw=filter_kw_)
        for annot in orig_annot_sample
    ]
    is_ok = np.array(ut.lmap(len, refiltered_sample)) >= 2
    ok_part_orig_sample = ut.compress(orig_annot_sample, is_ok)
    ok_part_orig_nids = [x.nids[0] for x in ok_part_orig_sample]

    # Now compute real sample
    aids = ibs.filter_annots_general(filter_kw=filter_kw_)
    all_annots = ibs.annots(aids)
    unique_nids, grouped_annots_ = all_annots.group(all_annots.nids)
    grouped_annots = grouped_annots_
    # Ensure we get everything
    # grouped_annots = [ibs.annots(aids_) for aids_ in ibs.get_name_aids(unique_nids)]

    pop = len(grouped_annots)
    pername_list = ut.lmap(len, grouped_annots)
    groups = wbia.annots.AnnotGroups(grouped_annots, ibs)
    match_tags = [ut.unique(ut.flatten(t)) for t in groups.match_tags]
    tag_case_hist = ut.dict_hist(ut.flatten(match_tags))
    print('name_pop = %r' % (pop, ))
    print('Annots per Multiton Name' +
          ut.repr3(ut.get_stats(pername_list, use_median=True)))
    print('Name Tag Hist ' + ut.repr3(tag_case_hist))
    print('Percent Photobomb: %.2f%%' %
          (tag_case_hist['photobomb'] / pop * 100))
    print('Percent Split: %.2f%%' % (tag_case_hist['splitcase'] / pop * 100))

    # Remove the ok part from this sample
    remain_unique_nids = ut.setdiff(unique_nids, ok_part_orig_nids)
    remain_grouped_annots = [
        ibs.annots(aids_) for aids_ in ibs.get_name_aids(remain_unique_nids)
    ]

    sample_size = 75
    import vtool as vt

    vt.calc_sample_from_error_bars(0.05, pop, conf_level=0.95, prior=0.05)

    remain_rand_idxs = ut.random_indexes(len(remain_grouped_annots),
                                         seed=3340258)
    remain_sample_size = sample_size - len(ok_part_orig_nids)
    remain_random_annot_groups = ut.take(remain_grouped_annots,
                                         remain_rand_idxs)
    remain_annot_sample = remain_random_annot_groups[:remain_sample_size]

    annot_sample_nofilter = ok_part_orig_sample + remain_annot_sample
    # Filter out all bad parts
    annot_sample_filter = [
        ibs.annots(ibs.filter_annots_general(annot.aids, filter_kw=filter_kw_))
        for annot in annot_sample_nofilter
    ]
    annot_sample = annot_sample_filter

    win = None
    from wbia.viz import viz_graph2

    for annots in ut.InteractiveIter(annot_sample):
        if win is not None:
            win.close()
        win = viz_graph2.make_qt_graph_interface(ibs,
                                                 aids=annots.aids,
                                                 init_mode='rereview')
        print(win)

    sample_groups = wbia.annots.AnnotGroups(annot_sample, ibs)

    flat_tags = [ut.unique(ut.flatten(t)) for t in sample_groups.match_tags]

    print('Using Split and Photobomb')
    is_positive = ['photobomb' in t or 'splitcase' in t for t in flat_tags]
    num_positive = sum(is_positive)
    vt.calc_error_bars_from_sample(sample_size,
                                   num_positive,
                                   pop,
                                   conf_level=0.95)

    print('Only Photobomb')
    is_positive = ['photobomb' in t for t in flat_tags]
    num_positive = sum(is_positive)
    vt.calc_error_bars_from_sample(sample_size,
                                   num_positive,
                                   pop,
                                   conf_level=0.95)

    print('Only SplitCase')
    is_positive = ['splitcase' in t for t in flat_tags]
    num_positive = sum(is_positive)
    vt.calc_error_bars_from_sample(sample_size,
                                   num_positive,
                                   pop,
                                   conf_level=0.95)
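The `vt.calc_error_bars_from_sample` calls estimate a population rate from a sample count. A textbook normal-approximation sketch of that computation; this is not vtool's implementation:

import math

def proportion_confidence_interval(num_positive, sample_size, conf_level=0.95):
    # Normal-approximation confidence interval for a sampled proportion
    z = {0.90: 1.645, 0.95: 1.960, 0.99: 2.576}[conf_level]
    p_hat = num_positive / float(sample_size)
    margin = z * math.sqrt(p_hat * (1.0 - p_hat) / sample_size)
    return p_hat - margin, p_hat + margin

# proportion_confidence_interval(9, 75) -> approximately (0.047, 0.193)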