Example #1
def revert_to_backup(ibs):
    r"""
    Args:
        ibs (IBEISController): ibeis controller object

    CommandLine:
        python -m ibeis.control._sql_helpers --exec-revert_to_backup

    Example:
        >>> # SCRIPT
        >>> from ibeis.control._sql_helpers import *  # NOQA
        >>> import ibeis
        >>> ibs = ibeis.opendb(defaultdb='GZ_Master1')
        >>> result = revert_to_backup(ibs)
        >>> print(result)
    """
    db_path = ibs.get_db_core_path()
    ibs.disconnect_sqldatabase()
    backup_dir = ibs.backupdir

    ut.move(db_path, ut.get_nonconflicting_path(db_path + 'revertfrom.%d.orig'))
    # Careful: this may invalidate the cache
    fname, ext = splitext(db_path)
    path_list = sorted(ut.glob(backup_dir, '*%s' % ext))
    previous_backup = path_list[-1]
    ut.copy(previous_backup, db_path)
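Note on the sorted() call above: backup filenames embed a zero-padded UTC timestamp (see the database_backup example later on this page), so lexicographic order matches chronological order and path_list[-1] is the newest backup. A quick illustration with hypothetical filenames:

>>> backups = ['_ibeis_database_backup_2015_09_01_00_00_00.sqlite3',
...            '_ibeis_database_backup_2015_10_15_00_00_00.sqlite3']
>>> sorted(backups)[-1]
'_ibeis_database_backup_2015_10_15_00_00_00.sqlite3'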
Example #2
def ensure_ctags_win32():
    import utool as ut
    from os.path import join
    dpath = ut.grab_zipped_url('http://prdownloads.sourceforge.net/ctags/ctags58.zip')
    """
    TODO: Download the zipfile, then unzip and take ONLY the
    file ctags58/ctags58/ctags.exe and move it somewhere in the PATH;
    the best place might be C:\Program Files\Git\mingw64\bin
    (a standard-library sketch of this flow follows this example)

    ALSO:
    make a win setup file

    Downloads fonts from https://www.dropbox.com/sh/49h1ht1e2t7dlbj/AACzVIDrfn1GkImP5l_C3Vtia?dl=1
    """

    ctags_fname = 'ctags.exe'
    ctags_src = join(dpath, ctags_fname)
    def find_mingw_bin():
        pathdirs = ut.get_path_dirs()
        copydir = None
        # heuristic for finding the mingw bin directory
        for pathdir in pathdirs:
            pathdir_ = pathdir.lower()
            ismingwbin = (pathdir_.find('mingw') > -1 and pathdir_.endswith('bin'))
            if ismingwbin:
                issmaller = (copydir is None or len(pathdir) < len(copydir))
                if issmaller:
                    copydir = pathdir
        return copydir
    copydir = find_mingw_bin()
    ctags_dst = join(copydir, ctags_fname)
    ut.copy(ctags_src, ctags_dst, overwrite=False)
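The TODO in the docstring above is not implemented by this snippet. A minimal standard-library sketch of that flow; the archive member name and the destination directory are assumptions, not facts from the source:

import io
import os
import shutil
import zipfile
from urllib.request import urlopen

def install_ctags_win32(dst_dir=r'C:\Program Files\Git\mingw64\bin'):
    url = 'http://prdownloads.sourceforge.net/ctags/ctags58.zip'
    # Download the zipfile into memory
    with urlopen(url) as resp:
        archive = zipfile.ZipFile(io.BytesIO(resp.read()))
    # Take ONLY ctags.exe and copy it into a directory on the PATH
    with archive.open('ctags58/ctags.exe') as src:  # assumed member name
        with open(os.path.join(dst_dir, 'ctags.exe'), 'wb') as dst:
            shutil.copyfileobj(src, dst)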
Example #3
def copy_ibeisdb(source_dbdir, dest_dbdir):
    # TODO: rectify with rsync script and merge script.
    from os.path import normpath
    import ibeis
    exclude_dirs_ = (ibeis.const.EXCLUDE_COPY_REL_DIRS +
                     ['_hsdb', '.hs_internals'])
    exclude_dirs = [ut.ensure_unixslash(normpath(rel))
                    for rel in exclude_dirs_]

    rel_tocopy = ut.glob(source_dbdir, '*', exclude_dirs=exclude_dirs,
                         recursive=True, with_files=True, with_dirs=False,
                         fullpath=False)
    rel_tocopy_dirs = ut.glob(source_dbdir, '*', exclude_dirs=exclude_dirs,
                              recursive=True, with_files=False, with_dirs=True,
                              fullpath=False)

    src_list = [join(source_dbdir, relpath) for relpath in rel_tocopy]
    dst_list = [join(dest_dbdir, relpath) for relpath in rel_tocopy]

    # ensure directories exist
    rel_tocopy_dirs = [dest_dbdir] + [join(dest_dbdir, dpath_)
                                      for dpath_ in rel_tocopy_dirs]
    for dpath in rel_tocopy_dirs:
        ut.ensuredir(dpath)
    # copy files
    ut.copy(src_list, dst_list)
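For comparison, a rough standard-library equivalent of the copy-with-exclusions above; it is only approximate, since shutil.ignore_patterns matches bare names anywhere in the tree rather than exact relative paths, and copytree requires that dest_dbdir not already exist (before Python 3.8's dirs_exist_ok):

import shutil

shutil.copytree(source_dbdir, dest_dbdir,
                ignore=shutil.ignore_patterns('_hsdb', '.hs_internals'))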
Example #4
 def flush_copy_tasks(self):
     # Execute all copy tasks and empty the lists
     if ut.NOT_QUIET:
         print('[DRAW_RESULT] copying %r summaries' % (len(self.cp_task_list)))
     for src, dst in self.cp_task_list:
         ut.copy(src, dst, verbose=False)
     del self.cp_task_list[:]
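A minimal self-contained sketch of the deferred-copy pattern above; the class name and file paths are illustrative, not from the source project:

import shutil

class CopyTaskQueue:
    def __init__(self):
        self.cp_task_list = []

    def add_copy_task(self, src, dst):
        # Queue the copy instead of performing it immediately
        self.cp_task_list.append((src, dst))

    def flush_copy_tasks(self):
        # Execute all queued copy tasks, then empty the list
        for src, dst in self.cp_task_list:
            shutil.copy(src, dst)
        del self.cp_task_list[:]

queue = CopyTaskQueue()
queue.add_copy_task('results/summary.png', 'report/summary.png')
queue.flush_copy_tasks()  # assumes 'results/summary.png' exists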
Example #5
def init_console2():
    assert ut.WIN32, 'win32 only script'
    url = 'http://downloads.sourceforge.net/project/console/console-devel/2.00/Console-2.00b148-Beta_32bit.zip'
    unzipped_fpath = ut.grab_zipped_url(url)
    # FIXME: bugged
    unzipped_fpath2 = join(dirname(unzipped_fpath), 'Console2')
    win32_bin = ut.truepath('~/local/PATH')
    ut.copy(ut.ls(unzipped_fpath2), win32_bin)
Example #6
def labeler_train(ibs,
                  species_list=None,
                  species_mapping=None,
                  viewpoint_mapping=None,
                  ensembles=3,
                  **kwargs):
    from ibeis_cnn.ingest_ibeis import get_cnn_labeler_training_images_pytorch
    from ibeis.algo.detect import densenet

    species = '-'.join(species_list)
    args = (species, )
    data_path = join(ibs.get_cachedir(), 'extracted-labeler-%s' % args)
    extracted_path = get_cnn_labeler_training_images_pytorch(
        ibs,
        category_list=species_list,
        category_mapping=species_mapping,
        viewpoint_mapping=viewpoint_mapping,
        dest_path=data_path,
        **kwargs)

    weights_path_list = []
    for ensemble_num in range(ensembles):
        args = (
            species,
            ensemble_num,
        )
        output_path = join(ibs.get_cachedir(), 'training',
                           'labeler-%s-ensemble-%d' % args)
        if exists(output_path):
            ut.delete(output_path)
        weights_path = densenet.train(extracted_path,
                                      output_path,
                                      blur=False,
                                      flip=False)
        weights_path_list.append(weights_path)

    args = (species, )
    output_name = 'labeler.%s' % args
    ensemble_path = join(ibs.get_cachedir(), 'training', output_name)
    ut.ensuredir(ensemble_path)

    archive_path = '%s.zip' % (ensemble_path)
    ensemble_weights_path_list = []

    for index, weights_path in enumerate(sorted(weights_path_list)):
        assert exists(weights_path)
        ensemble_weights_path = join(ensemble_path,
                                     'labeler.%d.weights' % (index, ))
        ut.copy(weights_path, ensemble_weights_path)
        ensemble_weights_path_list.append(ensemble_weights_path)

    ensemble_weights_path_list = [ensemble_path] + ensemble_weights_path_list
    ut.archive_files(archive_path,
                     ensemble_weights_path_list,
                     overwrite=True,
                     common_prefix=True)

    return archive_path
Example #7
def canonical_classifier_train(ibs,
                               species,
                               ensembles=3,
                               extracted_path=None,
                               **kwargs):
    from wbia.other.detectexport import (
        get_cnn_classifier_canonical_training_images_pytorch, )
    from wbia.algo.detect import densenet

    args = (species, )
    data_path = join(ibs.get_cachedir(),
                     'extracted-classifier-canonical-%s' % args)
    if extracted_path is None:
        extracted_path = get_cnn_classifier_canonical_training_images_pytorch(
            ibs,
            species,
            dest_path=data_path,
        )

    weights_path_list = []
    for ensemble_num in range(ensembles):
        args = (
            species,
            ensemble_num,
        )
        output_path = join(ibs.get_cachedir(), 'training',
                           'classifier-canonical-%s-ensemble-%d' % args)
        if exists(output_path):
            ut.delete(output_path)
        weights_path = densenet.train(extracted_path,
                                      output_path,
                                      blur=False,
                                      flip=False)
        weights_path_list.append(weights_path)

    args = (species, )
    output_name = 'classifier.canonical.%s' % args
    ensemble_path = join(ibs.get_cachedir(), 'training', output_name)
    ut.ensuredir(ensemble_path)

    archive_path = '%s.zip' % (ensemble_path)
    ensemble_weights_path_list = []

    for index, weights_path in enumerate(sorted(weights_path_list)):
        assert exists(weights_path)
        ensemble_weights_path = join(
            ensemble_path, 'classifier.canonical.%d.weights' % (index, ))
        ut.copy(weights_path, ensemble_weights_path)
        ensemble_weights_path_list.append(ensemble_weights_path)

    ensemble_weights_path_list = [ensemble_path] + ensemble_weights_path_list
    ut.archive_files(archive_path,
                     ensemble_weights_path_list,
                     overwrite=True,
                     common_prefix=True)

    return archive_path
Example #8
def detector_train(ibs):
    results = ibs.localizer_train()
    localizer_weight_path, localizer_config_path, localizer_class_path = results
    classifier_model_path = ibs.classifier_binary_train()
    labeler_model_path = ibs.labeler_train()
    output_path = join(ibs.get_cachedir(), 'training', 'detector')
    ut.ensuredir(output_path)
    ut.copy(localizer_weight_path, join(output_path, 'localizer.weights'))
    ut.copy(localizer_config_path, join(output_path, 'localizer.config'))
    ut.copy(localizer_class_path, join(output_path, 'localizer.classes'))
    ut.copy(classifier_model_path, join(output_path, 'classifier.npy'))
    ut.copy(labeler_model_path, join(output_path, 'labeler.npy'))
Example #9
def build_pyinstaller():
    """
    build_pyinstaller creates build/ibeis/* and dist/ibeis/*
    """
    print('[installer] +--- BUILD_PYINSTALLER ---')
    # 1) RUN: PYINSTALLER
    # Run the pyinstaller command (does all the work)
    utool_python_path = dirname(dirname(ut.__file__))
    #import os
    #os.environ['PYTHONPATH'] = os.pathsep.join([utool_python_path] + os.environ['PYTHONPATH'].strip(os.pathsep).split(os.pathsep))
    import os
    sys.path.insert(1, utool_python_path)
    if not ut.WIN32:
        pathcmd = 'export PYTHONPATH=%s%s$PYTHONPATH && ' % (utool_python_path,
                                                             os.pathsep)
    else:
        pathcmd = ''
    installcmd = '/opt/local/Library/Frameworks/Python.framework/Versions/2.7/bin/pyinstaller --runtime-hook _installers/rthook_pyqt4.py _installers/pyinstaller-ibeis.spec -y'
    output, err, ret = ut.cmd(pathcmd + installcmd)
    if ret != 0:
        raise AssertionError('Pyinstaller failed with return code = %r' %
                             (ret, ))
    #ut.cmd(installcmd)
    #ut.cmd('pyinstaller --runtime-hook rthook_pyqt4.py _installers/pyinstaller-ibeis.spec -y')
    #else:
    #ut.cmd('pyinstaller', '_installers/pyinstaller-ibeis.spec', '-y')
    #ut.cmd('pyinstaller', '--runtime-hook rthook_pyqt4.py', '_installers/pyinstaller-ibeis.spec')
    # 2) POST: PROCESSING
    # Perform some post processing steps on the mac

    if sys.platform == 'darwin' and exists('dist/IBEIS.app/Contents/'):
        copy_list = [
            ('ibsicon.icns', 'Resources/icon-windowed.icns'),
            ('Info.plist', 'Info.plist'),
        ]
        srcdir = '_installers'
        dstdir = 'dist/IBEIS.app/Contents/'
        for srcname, dstname in copy_list:
            src = join(srcdir, srcname)
            dst = join(dstdir, dstname)
            ut.copy(src, dst)
        # TODO: make this take arguments instead of defaulting to ~/code/ibeis/build
        #print("RUN: sudo ./_installers/mac_dmg_builder.sh")
    app_fpath = get_dist_app_fpath()
    print('[installer] app_fpath = %s' % (app_fpath, ))

    print('[installer] L___ FINISH BUILD_PYINSTALLER ___')
Example #10
def database_backup(db_dir, db_fname, backup_dir, max_keep=MAX_KEEP, manual=True):
    fname, ext = splitext(db_fname)
    src_fpath = join(db_dir, db_fname)
    #now = datetime.datetime.now()
    now = datetime.datetime.utcnow()
    if manual:
        now_str = now.strftime('%Y_%m_%d_%H_%M_%S')
    else:
        now_str = now.strftime('%Y_%m_%d_00_00_00')
    #dst_fpath = join(backup_dir, '%s_backup_%s%s' % (fname, now_str, ext))
    dst_fname = ''.join((fname, '_backup_', now_str, ext))
    dst_fpath = join(backup_dir, dst_fname)
    if exists(src_fpath) and not exists(dst_fpath):
        print('[ensure_daily_database_backup] Daily backup of database: %r -> %r' % (src_fpath, dst_fpath, ))
        ut.copy(src_fpath, dst_fpath)
        # Clean-up old database backups
        remove_old_backups(backup_dir, ext, max_keep)
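The resulting backup filename embeds the UTC timestamp; for example (values are illustrative):

>>> import datetime
>>> now = datetime.datetime(2015, 10, 15, 12, 30, 45)
>>> now.strftime('%Y_%m_%d_%H_%M_%S')
'2015_10_15_12_30_45'
# so db_fname='_ibeis_database.sqlite3' with manual=True becomes
# '_ibeis_database_backup_2015_10_15_12_30_45.sqlite3' in backup_dir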
Example #11
def canonical_localizer_train(ibs, species, ensembles=3, **kwargs):
    from ibeis_cnn.ingest_ibeis import get_cnn_localizer_canonical_training_images_pytorch
    from ibeis.algo.detect import canonical

    args = (species, )
    data_path = join(ibs.get_cachedir(),
                     'extracted-localizer-canonical-%s' % args)
    extracted_path = get_cnn_localizer_canonical_training_images_pytorch(
        ibs,
        species,
        dest_path=data_path,
    )

    weights_path_list = []
    for ensemble_num in range(ensembles):
        args = (
            species,
            ensemble_num,
        )
        output_path = join(ibs.get_cachedir(), 'training',
                           'localizer-canonical-%s-ensemble-%d' % args)
        weights_path = canonical.train(extracted_path, output_path)
        weights_path_list.append(weights_path)

    args = (species, )
    output_name = 'localizer.canonical.%s' % args
    ensemble_path = join(ibs.get_cachedir(), 'training', output_name)
    ut.ensuredir(ensemble_path)

    archive_path = '%s.zip' % (ensemble_path)
    ensemble_weights_path_list = []

    for index, weights_path in enumerate(sorted(weights_path_list)):
        assert exists(weights_path)
        ensemble_weights_path = join(
            ensemble_path, 'localizer.canonical.%d.weights' % (index, ))
        ut.copy(weights_path, ensemble_weights_path)
        ensemble_weights_path_list.append(ensemble_weights_path)

    ensemble_weights_path_list = [ensemble_path] + ensemble_weights_path_list
    ut.archive_files(archive_path,
                     ensemble_weights_path_list,
                     overwrite=True,
                     common_prefix=True)

    return archive_path
Example #12
def build_pyinstaller():
    clean_pyinstaller()
    # Run the pyinstaller command (does all the work)
    utool.cmd('pyinstaller', '_installers/pyinstaller-ibeis.spec')
    # Perform some post processing steps on the mac
    if sys.platform == 'darwin' and exists("dist/IBEIS.app/Contents/"):
        copy_list = [
            ('ibsicon.icns', 'Resources/icon-windowed.icns'),
            ('Info.plist', 'Info.plist'),
        ]
        srcdir = '_installers'
        dstdir = 'dist/IBEIS.app/Contents/'
        for srcname, dstname in copy_list:
            src = join(srcdir, srcname)
            dst = join(dstdir, dstname)
            utool.copy(src, dst)
        print("RUN: ./_installers/mac_dmg_builder.sh")
Example #13
def classifier_cameratrap_densenet_train(ibs,
                                         positive_imageset_id,
                                         negative_imageset_id,
                                         ensembles=3,
                                         **kwargs):
    from wbia.other.detectexport import (
        get_cnn_classifier_cameratrap_binary_training_images_pytorch, )
    from wbia.algo.detect import densenet

    data_path = join(ibs.get_cachedir(), 'extracted-classifier-cameratrap')
    extracted_path = get_cnn_classifier_cameratrap_binary_training_images_pytorch(
        ibs,
        positive_imageset_id,
        negative_imageset_id,
        dest_path=data_path,
        image_size=densenet.INPUT_SIZE,
        **kwargs,
    )

    weights_path_list = []
    for ensemble_num in range(ensembles):
        args = (ensemble_num, )
        output_path = join(ibs.get_cachedir(), 'training',
                           'classifier-cameratrap-ensemble-%d' % args)
        weights_path = densenet.train(extracted_path,
                                      output_path,
                                      blur=True,
                                      flip=True)
        weights_path_list.append(weights_path)

    archive_name = 'classifier.cameratrap.zip'
    archive_path = join(ibs.get_cachedir(), 'training', archive_name)
    ensemble_weights_path_list = []

    for index, weights_path in enumerate(sorted(weights_path_list)):
        assert exists(weights_path)
        ensemble_weights_path = 'classifier.cameratrap.%d.weights' % (index, )
        ut.copy(weights_path, ensemble_weights_path)
        ensemble_weights_path_list.append(ensemble_weights_path)

    ut.archive_files(archive_path,
                     ensemble_weights_path_list,
                     overwrite=True,
                     common_prefix=True)

    return archive_path
Example #14
def build_pyinstaller():
    """
    build_pyinstaller creates build/ibeis/* and dist/ibeis/*
    """
    print('[installer] +--- BUILD_PYINSTALLER ---')
    # 1) RUN: PYINSTALLER
    # Run the pyinstaller command (does all the work)
    utool_python_path = dirname(dirname(ut.__file__))
    #import os
    #os.environ['PYTHONPATH'] = os.pathsep.join([utool_python_path] + os.environ['PYTHONPATH'].strip(os.pathsep).split(os.pathsep))
    import os
    sys.path.insert(1, utool_python_path)
    if not ut.WIN32:
        pathcmd = 'export PYTHONPATH=%s%s$PYTHONPATH && ' % (utool_python_path, os.pathsep)
    else:
        pathcmd = ''
    installcmd = 'pyinstaller --runtime-hook rthook_pyqt4.py _installers/pyinstaller-ibeis.spec -y'
    output, err, ret = ut.cmd(pathcmd + installcmd)
    if ret != 0:
        raise AssertionError('Pyinstaller failed with return code = %r' % (ret,))
    #ut.cmd(installcmd)
    #ut.cmd('pyinstaller --runtime-hook rthook_pyqt4.py _installers/pyinstaller-ibeis.spec -y')
    #else:
    #ut.cmd('pyinstaller', '_installers/pyinstaller-ibeis.spec', '-y')
    #ut.cmd('pyinstaller', '--runtime-hook rthook_pyqt4.py', '_installers/pyinstaller-ibeis.spec')
    # 2) POST: PROCESSING
    # Perform some post processing steps on the mac

    if sys.platform == 'darwin' and exists('dist/IBEIS.app/Contents/'):
        copy_list = [
            ('ibsicon.icns', 'Resources/icon-windowed.icns'),
            ('Info.plist', 'Info.plist'),
        ]
        srcdir = '_installers'
        dstdir = 'dist/IBEIS.app/Contents/'
        for srcname, dstname in copy_list:
            src = join(srcdir, srcname)
            dst = join(dstdir, dstname)
            ut.copy(src, dst)
        # TODO: make this take arguments instead of defaulting to ~/code/ibeis/build
        #print("RUN: sudo ./_installers/mac_dmg_builder.sh")
    app_fpath = get_dist_app_fpath()
    print('[installer] app_fpath = %s' % (app_fpath,))

    print('[installer] L___ FINISH BUILD_PYINSTALLER ___')
Example #15
def copy_ibeisdb(source_dbdir, dest_dbdir):
    # TODO: rectify with rsync script
    from os.path import normpath
    import ibeis
    exclude_dirs = [ut.ensure_unixslash(normpath(rel)) for rel in ibeis.const.EXCLUDE_COPY_REL_DIRS + ['_hsdb', '.hs_internals']]

    rel_tocopy = ut.glob(source_dbdir, '*', exclude_dirs=exclude_dirs, recursive=True, with_files=True, with_dirs=False, fullpath=False)
    rel_tocopy_dirs = ut.glob(source_dbdir, '*', exclude_dirs=exclude_dirs, recursive=True, with_files=False, with_dirs=True, fullpath=False)

    src_list = [join(source_dbdir, relpath) for relpath in rel_tocopy]
    dst_list = [join(dest_dbdir, relpath) for relpath in rel_tocopy]

    # ensure directories exist
    rel_tocopy_dirs = [dest_dbdir] + [join(dest_dbdir, dpath_) for dpath_ in rel_tocopy_dirs]
    for dpath in rel_tocopy_dirs:
        ut.ensuredir(dpath)
    # copy files
    ut.copy(src_list, dst_list)
Example #16
def ensure_local_war(verbose=ut.NOT_QUIET):
    """
    Ensures tomcat has been unpacked and the war is localized

    CommandLine:
        ibeis ensure_local_war

    Example:
        >>> # SCRIPT
        >>> from ibeis.control.wildbook_manager import *  # NOQA
        >>> result = ensure_local_war()
        >>> print(result)
    """
    # TODO: allow custom specified tomcat directory
    try:
        output = subprocess.check_output(['java', '-version'],
                                         stderr=subprocess.STDOUT)
        _java_version = output.decode('utf-8').split('\n')[0]  # bytes under Python 3
        _java_version = _java_version.replace('java version ', '')
        java_version = _java_version.replace('"', '')
        print('java_version = %r' % (java_version, ))
        if not java_version.startswith('1.7'):
            print('Warning: wildbook is only supported for java 1.7')
    except OSError:
        output = None
    if output is None:
        raise ImportError(
            'Cannot find java on this machine. '
            'Please install java: http://www.java.com/en/download/')

    tomcat_dpath = find_or_download_tomcat()
    assert tomcat_dpath is not None, 'Could not find tomcat'
    redownload = ut.get_argflag('--redownload-war')
    war_fpath = find_or_download_wilbook_warfile(redownload=redownload)
    war_fname = basename(war_fpath)

    # Move the war file to tomcat webapps if not there
    webapps_dpath = join(tomcat_dpath, 'webapps')
    deploy_war_fpath = join(webapps_dpath, war_fname)
    if not ut.checkpath(deploy_war_fpath, verbose=verbose):
        ut.copy(war_fpath, deploy_war_fpath)

    wb_target = splitext(war_fname)[0]
    return tomcat_dpath, webapps_dpath, wb_target
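The version parsing above reduces the first line of java -version output to a bare version string, for example:

>>> line = 'java version "1.7.0_80"'
>>> line.replace('java version ', '').replace('"', '')
'1.7.0_80'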
Example #17
    def publish(dstcnvs_normer, cachedir=None):
        """
        Sets this as the default normalizer available for download
        ONLY DEVELOPERS CAN PERFORM THIS OPERATION

        Args:
            cachedir (str):

        CommandLine:
            python -m wbia.algo.hots.distinctiveness_normalizer --test-publish

        Example:
            >>> # DISABLE_DOCTEST
            >>> from wbia.algo.hots.distinctiveness_normalizer import *  # NOQA
            >>> dstcnvs_normer = testdata_distinctiveness()[0]
            >>> dstcnvs_normer.rebuild()
            >>> dstcnvs_normer.save()
            >>> result = dstcnvs_normer.publish(cachedir)
            >>> # verify results
            >>> print(result)
        """
        from os.path import basename, join

        assert ut.is_developer(), 'ONLY DEVELOPERS CAN PERFORM THIS OPERATION'
        cachedir = dstcnvs_normer.cachedir if cachedir is None else cachedir
        archive_fpath = dstcnvs_normer.archive(cachedir, overwrite=True)
        archive_fname = basename(archive_fpath)
        publish_dpath = PUBLISH_DIR
        publish_fpath = join(publish_dpath, archive_fname)
        if ut.checkpath(publish_fpath, verbose=True):
            logger.info('Overwriting model')
            logger.info(
                'old nBytes(publish_fpath) = %s'
                % (ut.get_file_nBytes_str(publish_fpath),)
            )
            logger.info(
                'new nBytes(archive_fpath) = %s'
                % (ut.get_file_nBytes_str(archive_fpath),)
            )
        else:
            logger.info('Publishing model')
        logger.info('publish_fpath = %r' % (publish_fpath,))
        ut.copy(archive_fpath, publish_fpath)
Example #18
def train_part_detector():
    """
    Problem:
        Healthy sharks usually have a mostly whole-body shot, while
        injured sharks usually have a close-up shot. This distribution
        of images is likely what the injur-shark net is picking up on.

    The goal is to train a detector that looks for things that look
    like the distribution of injured sharks.

    We will run this on healthy sharks to find the parts of
    """
    import wbia

    ibs = wbia.opendb('WS_ALL')
    imgset = ibs.imagesets(text='Injured Sharks')
    injured_annots = imgset.annots[0]  # NOQA

    # config = {
    #    'dim_size': (224, 224),
    #    'resize_dim': 'wh'
    # }

    from pydarknet import Darknet_YOLO_Detector

    data_path = ibs.export_to_xml()
    output_path = join(ibs.get_cachedir(), 'training', 'localizer')
    ut.ensuredir(output_path)
    dark = Darknet_YOLO_Detector()
    results = dark.train(data_path, output_path)
    del dark

    localizer_weight_path, localizer_config_path, localizer_class_path = results
    classifier_model_path = ibs.classifier_train()
    labeler_model_path = ibs.labeler_train()
    output_path = join(ibs.get_cachedir(), 'training', 'detector')
    ut.ensuredir(output_path)
    ut.copy(localizer_weight_path, join(output_path, 'localizer.weights'))
    ut.copy(localizer_config_path, join(output_path, 'localizer.config'))
    ut.copy(localizer_class_path, join(output_path, 'localizer.classes'))
    ut.copy(classifier_model_path, join(output_path, 'classifier.npy'))
    ut.copy(labeler_model_path, join(output_path, 'labeler.npy'))
Example #19
def testdata_ensure_unconverted_hsdb():
    r"""
    Makes an unconverted test datapath

    CommandLine:
        python -m ibeis.dbio.ingest_hsdb --test-testdata_ensure_unconverted_hsdb

    Example:
        >>> # SCRIPT
        >>> from ibeis.dbio.ingest_hsdb import *  # NOQA
        >>> result = testdata_ensure_unconverted_hsdb()
        >>> print(result)
    """
    import utool as ut
    assert ut.is_developer(), 'dev function only'
    # Make an unconverted test database
    ut.ensurepath('/raid/tests/tmp')
    ut.delete('/raid/tests/tmp/Frogs')
    ut.copy('/raid/tests/Frogs', '/raid/tests/tmp/Frogs')
    hsdb_dir = '/raid/tests/tmp/Frogs'
    return hsdb_dir
Example #20
    def publish(dstcnvs_normer, cachedir=None):
        """
        Sets this as the default normalizer available for download
        ONLY DEVELOPERS CAN PERFORM THIS OPERATION

        Args:
            cachedir (str):

        CommandLine:
            python -m ibeis.algo.hots.distinctiveness_normalizer --test-publish

        Example:
            >>> # DISABLE_DOCTEST
            >>> from ibeis.algo.hots.distinctiveness_normalizer import *  # NOQA
            >>> dstcnvs_normer = testdata_distinctiveness()[0]
            >>> dstcnvs_normer.rebuild()
            >>> dstcnvs_normer.save()
            >>> result = dstcnvs_normer.publish(cachedir)
            >>> # verify results
            >>> print(result)
        """
        from os.path import basename, join
        assert ut.is_developer(), 'ONLY DEVELOPERS CAN PERFORM THIS OPERATION'
        cachedir      = dstcnvs_normer.cachedir if cachedir is None else cachedir
        archive_fpath = dstcnvs_normer.archive(cachedir, overwrite=True)
        archive_fname = basename(archive_fpath)
        publish_dpath = PUBLISH_DIR
        publish_fpath = join(publish_dpath, archive_fname)
        if ut.checkpath(publish_fpath, verbose=True):
            print('Overwriting model')
            print('old nBytes(publish_fpath) = %s' %
                  (ut.get_file_nBytes_str(publish_fpath),))
            print('new nBytes(archive_fpath) = %s' %
                  (ut.get_file_nBytes_str(archive_fpath),))
        else:
            print('Publishing model')
        print('publish_fpath = %r' % (publish_fpath,))
        ut.copy(archive_fpath, publish_fpath)
Example #21
def ensure_ctags_win32():
    import utool as ut
    from os.path import join

    dpath = ut.grab_zipped_url("http://prdownloads.sourceforge.net/ctags/ctags58.zip")
    ctags_fname = "ctags.exe"
    ctags_src = join(dpath, ctags_fname)

    def find_mingw_bin():
        pathdirs = ut.get_path_dirs()
        copydir = None
        # heuristic for finding the mingw bin directory
        for pathdir in pathdirs:
            pathdir_ = pathdir.lower()
            ismingwbin = pathdir_.find("mingw") > -1 and pathdir_.endswith("bin")
            if ismingwbin:
                issmaller = copydir is None or len(pathdir) < len(copydir)
                if issmaller:
                    copydir = pathdir
        return copydir

    copydir = find_mingw_bin()
    ctags_dst = join(copydir, ctags_fname)
    ut.copy(ctags_src, ctags_dst, overwrite=False)
Example #22
def export(ibs, aid_pairs=None):
    """
    3 - 4 different animals
    2 views of each
    matching keypoint coordinates on each annotation
    """
    if aid_pairs is None:
        if ibs.get_dbname() == 'PZ_MOTHERS':
            aid_pair_list = MOTHERS_VIEWPOINT_EXPORT_PAIRS
        if ibs.get_dbname() == 'GZ_ALL':
            aid_pair_list = GZ_VIEWPOINT_EXPORT_PAIRS
    ibs.update_query_cfg(ratio_thresh=1.6)
    export_path = expanduser('~/Dropbox/Assignments/dataset')
    #utool.view_directory(export_path)
    # MOTHERS EG:
    for aid_pair in aid_pair_list:
        qaid2_qres = ibs.query_intra_encounter(aid_pair)
        #ibeis.viz.show_qres(ibs, qaid2_qres.values()[1]); df2.iup()
        mrids_list = []
        mkpts_list = []
        for qaid, qres in six.iteritems(qaid2_qres):
            print('Getting kpts from %r' % qaid)
            #qres.show_top(ibs)
            posrid_list = utool.ensure_iterable(qres.get_classified_pos())
            mrids_list.extend([(qaid, posrid) for posrid in posrid_list])
            mkpts_list.extend(qres.get_matching_keypoints(ibs, posrid_list))

        mkey2_kpts = {}
        for mrids_tup, mkpts_tup in zip(mrids_list, mkpts_list):
            assert len(mrids_tup) == 2, 'must be a match tuple'
            mrids_ = np.array(mrids_tup)
            sortx = mrids_.argsort()
            mrids_ = mrids_[sortx]
            mkpts_ = np.array(mkpts_tup)[sortx]
            if sortx[0] == 0:
                pass
            mkey = tuple(mrids_.tolist())
            try:
                kpts_list = mkey2_kpts[mkey]
                print('append to mkey=%r' % (mkey,))
            except KeyError:
                print('new mkey=%r' % (mkey,))
                kpts_list = []
            kpts_list.append(mkpts_)
            mkey2_kpts[mkey] = kpts_list

        mkeys_list = mkey2_kpts.keys()
        mkeys_keypoints = mkey2_kpts.values()

        for mkeys, mkpts_list in zip(mkeys_list, mkeys_keypoints):
            print(mkeys)
            print(len(kpts_list))
            kpts1_m = np.vstack([mkpts[0] for mkpts in mkpts_list])
            kpts2_m = np.vstack([mkpts[1] for mkpts in mkpts_list])
            match_lines = [
                repr(
                    (
                        tuple(kp1[ktool.LOC_DIMS].tolist()),
                        tuple(kp2[ktool.LOC_DIMS].tolist()),
                    )
                ) + ', '
                for kp1, kp2 in zip(kpts1_m, kpts2_m)]

            mcpaths_list = ibs.get_annot_cpaths(mkeys)
            fnames_list = map(lambda x: split(x)[1], mcpaths_list)
            for path in mcpaths_list:
                utool.copy(path, export_path)

            header_lines = ['# Exported keypoint matches (might contain duplicate matches)',
                            '# matching_aids = %r' % (mkey,)]
            header_lines += ['# img%d = %r' % (count, fname) for count, fname in enumerate(fnames_list)]
            header_lines += ['# LINE FORMAT: match_pts = [(img1_xy, img2_xy) ... ]']
            header_text = '\n'.join(header_lines)
            match_text  = '\n'.join(['match_pts = ['] + match_lines + [']'])
            matchfile_text = '\n'.join([header_text, match_text])
            matchfile_name = ('match_aids(%d,%d).txt' % mkey)
            matchfile_path = join(export_path, matchfile_name)
            utool.write_to(matchfile_path, matchfile_text)
            print(header_text)
            print(utool.truncate_str(match_text, maxlen=500))
Example #23
def localizer_lightnet_train(
    ibs,
    species_list,
    cuda_device='0',
    batches=60000,
    validate_with_accuracy=True,
    deploy_tag=None,
    cleanup=True,
    cleanup_all=True,
    deploy=True,
    cache_species_str=None,
    **kwargs,
):
    from wbia.algo.detect import lightnet
    import subprocess
    import datetime
    import math
    import sys

    assert species_list is not None
    species_list = sorted(species_list)

    lightnet_training_kit_url = lightnet._download_training_kit()
    _localizer_lightnet_validate_training_kit(lightnet_training_kit_url)

    hashstr = ut.random_nonce()[:16]
    if cache_species_str is None:
        cache_species_str = '-'.join(species_list)

    cache_path = join(ibs.cachedir, 'training', 'lightnet')
    ut.ensuredir(cache_path)
    training_instance_folder = 'lightnet-training-%s-%s' % (cache_species_str,
                                                            hashstr)
    training_instance_path = join(cache_path, training_instance_folder)
    ut.copy(lightnet_training_kit_url, training_instance_path)

    backup_path = join(training_instance_path, 'backup')
    bin_path = join(training_instance_path, 'bin')
    cfg_path = join(training_instance_path, 'cfg')
    data_path = join(training_instance_path, 'data')
    deploy_path = join(training_instance_path, 'deploy')
    weights_path = join(training_instance_path, 'darknet19_448.conv.23.pt')
    results_path = join(training_instance_path, 'results.txt')
    dataset_py_path = join(bin_path, 'dataset.template.py')
    labels_py_path = join(bin_path, 'labels.template.py')
    test_py_path = join(bin_path, 'test.template.py')
    train_py_path = join(bin_path, 'train.template.py')
    config_py_path = join(cfg_path, 'yolo.template.py')

    ibs.export_to_xml(species_list=species_list,
                      output_path=data_path,
                      **kwargs)

    species_str_list = ['%r' % (species, ) for species in species_list]
    species_str = ', '.join(species_str_list)
    replace_dict = {
        '_^_YEAR_^_': str(datetime.datetime.now().year),
        '_^_DATA_ROOT_^_': data_path,
        '_^_SPECIES_MAPPING_^_': species_str,
        '_^_NUM_BATCHES_^_': str(batches),
    }

    dataset_py_path = _localizer_lightnet_template_replace(
        dataset_py_path, replace_dict)
    labels_py_path = _localizer_lightnet_template_replace(
        labels_py_path, replace_dict)
    test_py_path = _localizer_lightnet_template_replace(
        test_py_path, replace_dict)
    train_py_path = _localizer_lightnet_template_replace(
        train_py_path, replace_dict)
    config_py_path = _localizer_lightnet_template_replace(
        config_py_path, replace_dict)
    assert exists(dataset_py_path)
    assert exists(labels_py_path)
    assert exists(test_py_path)
    assert exists(train_py_path)
    assert exists(config_py_path)
    assert not exists(backup_path)
    assert not exists(results_path)

    python_exe = sys.executable
    cuda_str = ('' if cuda_device in [-1, None] or len(cuda_device) == 0 else
                'CUDA_VISIBLE_DEVICES=%s ' % (cuda_device, ))

    # Call labels
    call_str = '%s %s' % (python_exe, labels_py_path)
    logger.info(call_str)
    subprocess.call(call_str, shell=True)

    # Call training
    # Example: CUDA_VISIBLE_DEVICES=X python bin/train.py -c -n cfg/yolo.py -c darknet19_448.conv.23.pt
    args = (
        cuda_str,
        python_exe,
        train_py_path,
        config_py_path,
        backup_path,
        weights_path,
    )
    call_str = '%s%s %s -c -n %s -b %s %s' % args
    logger.info(call_str)
    subprocess.call(call_str, shell=True)
    assert exists(backup_path)
    """
    x = (
        'CUDA_VISIBLE_DEVICES=0 ',
        '/virtualenv/env3/bin/python',
        '/data/db/_ibsdb/_ibeis_cache/training/lightnet/lightnet-training-right_whale-right_whale+body-right_whale+fluke-right_whale+head-right_whale+peduncle-a36054bf78166a05/bin/test.py',
        '/data/db/_ibsdb/_ibeis_cache/training/lightnet/lightnet-training-right_whale-right_whale+body-right_whale+fluke-right_whale+head-right_whale+peduncle-a36054bf78166a05/cfg/yolo.py',
        '/data/db/_ibsdb/_ibeis_cache/training/lightnet/lightnet-training-right_whale-right_whale+body-right_whale+fluke-right_whale+head-right_whale+peduncle-a36054bf78166a05/results.txt',
        '/data/db/_ibsdb/_ibeis_cache/training/lightnet/lightnet-training-right_whale-right_whale+body-right_whale+fluke-right_whale+head-right_whale+peduncle-a36054bf78166a05/backup',
        True,
        '/data/db/_ibsdb/_ibeis_cache/training/lightnet/lightnet-training-right_whale-right_whale+body-right_whale+fluke-right_whale+head-right_whale+peduncle-a36054bf78166a05/deploy',
        True,
        None,
        False,
        True,
        '/data/db/_ibsdb/_ibeis_cache/training/lightnet/lightnet-training-right_whale-right_whale+body-right_whale+fluke-right_whale+head-right_whale+peduncle-a36054bf78166a05/bin',
        '/data/db/_ibsdb/_ibeis_cache/training/lightnet/lightnet-training-right_whale-right_whale+body-right_whale+fluke-right_whale+head-right_whale+peduncle-a36054bf78166a05/cfg',
        '/data/db/_ibsdb/_ibeis_cache/training/lightnet/lightnet-training-right_whale-right_whale+body-right_whale+fluke-right_whale+head-right_whale+peduncle-a36054bf78166a05/data',
        '/data/db/_ibsdb/_ibeis_cache/training/lightnet/lightnet-training-right_whale-right_whale+body-right_whale+fluke-right_whale+head-right_whale+peduncle-a36054bf78166a05/darknet19_448.conv.23.pt',
        'wilddog'
    )

    cuda_str, python_exe, test_py_path, config_py_path, results_path, backup_path, validate_with_accuracy, deploy_path, deploy, deploy_tag, cleanup, cleanup_all, bin_path, cfg_path, data_path, weights_path, cache_species_str = x

    call_str = 'CUDA_VISIBLE_DEVICES=3 /home/jason.parham/virtualenv/wildme3.6/bin/python /data/db/_ibsdb/_ibeis_cache/training/lightnet/lightnet-training-right_whale-right_whale+body-right_whale+fluke-right_whale+head-right_whale+peduncle-a36054bf78166a05/bin/test.py -c -n /data/db/_ibsdb/_ibeis_cache/training/lightnet/lightnet-training-right_whale-right_whale+body-right_whale+fluke-right_whale+head-right_whale+peduncle-a36054bf78166a05/cfg/yolo.py --results /data/db/_ibsdb/_ibeis_cache/training/lightnet/lightnet-training-right_whale-right_whale+body-right_whale+fluke-right_whale+head-right_whale+peduncle-a36054bf78166a05/results.txt /data/db/_ibsdb/_ibeis_cache/training/lightnet/lightnet-training-right_whale-right_whale+body-right_whale+fluke-right_whale+head-right_whale+peduncle-a36054bf78166a05/backup/*'
    """

    # Call testing
    # Example: CUDA_VISIBLE_DEVICE=X python bin/test.py -c -n cfg/yolo.py
    args = (
        cuda_str,
        python_exe,
        test_py_path,
        config_py_path,
        results_path,
        backup_path,
    )
    call_str = '%s%s %s -c -n %s --results %s %s/*' % args
    logger.info(call_str)
    subprocess.call(call_str, shell=True)
    assert exists(results_path)

    # Validate results
    with open(results_path, 'r') as results_file:
        line_list = results_file.readlines()

    if len(line_list) < 10:
        logger.info('VALIDATION ERROR!')
        ut.embed()

    result_list = []
    for line in line_list:
        # logger.info(line)
        line = line.strip().split(',')
        if len(line) != 3:
            continue
        model_path, loss, accuracy = line
        loss = float(loss)
        accuracy = float(accuracy)
        if math.isnan(accuracy):
            continue
        miss_rate = (100.0 - accuracy) / 100.0
        if validate_with_accuracy:
            assert not math.isnan(miss_rate)
            result = (miss_rate, loss, model_path)
        else:
            assert not math.isnan(loss)
            result = (loss, miss_rate, model_path)
        logger.info('\t%r' % (result, ))
        result_list.append(result)
    result_list = sorted(result_list)

    best_result = result_list[0]
    best_model_filepath = best_result[-1]

    # Copy best model, delete the rest
    ut.ensuredir(deploy_path)
    deploy_model_filepath = join(deploy_path, 'detect.lightnet.weights')
    deploy_config_filepath = join(deploy_path, 'detect.lightnet.py')
    ut.copy(best_model_filepath, deploy_model_filepath)
    ut.copy(config_py_path, deploy_config_filepath)

    # Cleanup
    if cleanup:
        ut.delete(backup_path)
        ut.delete(results_path)

        if cleanup_all:
            ut.delete(bin_path)
            ut.delete(cfg_path)
            ut.delete(data_path)
            ut.delete(weights_path)

    # Deploy
    final_path = join('/', 'data', 'public', 'models')
    if deploy:
        assert exists(final_path), 'Cannot deploy the model on this machine'
        if deploy_tag is None:
            deploy_tag = cache_species_str

        counter = 0
        while True:
            final_config_prefix = 'detect.lightnet.%s.v%d' % (deploy_tag,
                                                              counter)
            final_config_filename = '%s.py' % (final_config_prefix, )
            final_config_filepath = join(final_path, final_config_filename)
            if not exists(final_config_filepath):
                break
            counter += 1

        final_model_filename = '%s.weights' % (final_config_prefix, )
        final_model_filepath = join(final_path, final_model_filename)

        assert not exists(final_model_filepath)
        assert not exists(final_config_filepath)

        ut.copy(deploy_model_filepath, final_model_filepath)
        ut.copy(deploy_config_filepath, final_config_filepath)

        retval = (
            final_model_filepath,
            final_config_filepath,
        )
    else:
        retval = (
            deploy_model_filepath,
            deploy_config_filepath,
        )

    return retval
Example #24
def export(ibs, aid_pairs=None):
    """
    3 - 4 different animals
    2 views of each
    matching keypoint coordinates on each annotation
    """
    if aid_pairs is None:
        if ibs.get_dbname() == 'PZ_MOTHERS':
            aid_pair_list = MOTHERS_VIEWPOINT_EXPORT_PAIRS
        if ibs.get_dbname() == 'GZ_ALL':
            aid_pair_list = GZ_VIEWPOINT_EXPORT_PAIRS
    ibs.update_query_cfg(ratio_thresh=1.6)
    export_path = expanduser('~/Dropbox/Assignments/dataset')
    #utool.view_directory(export_path)
    # MOTHERS EG:
    for aid_pair in aid_pair_list:
        cm_list, qreq_ = ibs.query_chips(aid_pair, aid_pair)
        #ibeis.viz.show_qres(ibs, qaid2_qres.values()[1]); df2.iup()
        mrids_list = []
        mkpts_list = []
        for cm in cm_list:
            qaid = cm.qaid
            print('Getting kpts from %r' % qaid)
            #cm.show_top(ibs)
            posrid_list = utool.ensure_iterable(cm.get_classified_pos())
            mrids_list.extend([(qaid, posrid) for posrid in posrid_list])
            mkpts_list.extend(cm.get_matching_keypoints(ibs, posrid_list))

        mkey2_kpts = {}
        for mrids_tup, mkpts_tup in zip(mrids_list, mkpts_list):
            assert len(mrids_tup) == 2, 'must be a match tuple'
            mrids_ = np.array(mrids_tup)
            sortx = mrids_.argsort()
            mrids_ = mrids_[sortx]
            mkpts_ = np.array(mkpts_tup)[sortx]
            if sortx[0] == 0:
                pass
            mkey = tuple(mrids_.tolist())
            try:
                kpts_list = mkey2_kpts[mkey]
                print('append to mkey=%r' % (mkey, ))
            except KeyError:
                print('new mkey=%r' % (mkey, ))
                kpts_list = []
            kpts_list.append(mkpts_)
            mkey2_kpts[mkey] = kpts_list

        mkeys_list = mkey2_kpts.keys()
        mkeys_keypoints = mkey2_kpts.values()

        for mkeys, mkpts_list in zip(mkeys_list, mkeys_keypoints):
            print(mkeys)
            print(len(kpts_list))
            kpts1_m = np.vstack([mkpts[0] for mkpts in mkpts_list])
            kpts2_m = np.vstack([mkpts[1] for mkpts in mkpts_list])
            match_lines = [
                repr((
                    tuple(kp1[ktool.LOC_DIMS].tolist()),
                    tuple(kp2[ktool.LOC_DIMS].tolist()),
                )) + ', ' for kp1, kp2 in zip(kpts1_m, kpts2_m)
            ]

            mcpaths_list = ibs.get_annot_chip_fpath(mkeys)
            fnames_list = list(map(lambda x: split(x)[1], mcpaths_list))
            for path in mcpaths_list:
                utool.copy(path, export_path)

            header_lines = [
                '# Exported keypoint matches (might contain duplicate matches)',
                '# matching_aids = %r' % (mkey, )
            ]
            header_lines += [
                '# img%d = %r' % (count, fname)
                for count, fname in enumerate(fnames_list)
            ]
            header_lines += [
                '# LINE FORMAT: match_pts = [(img1_xy, img2_xy) ... ]'
            ]
            header_text = '\n'.join(header_lines)
            match_text = '\n'.join(['match_pts = ['] + match_lines + [']'])
            matchfile_text = '\n'.join([header_text, match_text])
            matchfile_name = ('match_aids(%d,%d).txt' % mkey)
            matchfile_path = join(export_path, matchfile_name)
            utool.write_to(matchfile_path, matchfile_text)
            print(header_text)
            print(utool.truncate_str(match_text, maxlen=500))
Example #25
def netrun():
    r"""
    CommandLine:
        # --- UTILITY
        python -m ibeis_cnn --tf get_juction_dpath --show

        # --- DATASET BUILDING ---
        # Build Dataset Aliases
        python -m ibeis_cnn --tf netrun --db PZ_MTEST --acfg ctrl --ensuredata --show
        python -m ibeis_cnn --tf netrun --db PZ_Master1 --acfg timectrl --ensuredata
        python -m ibeis_cnn --tf netrun --db PZ_Master1 --acfg timectrl:pername=None --ensuredata
        python -m ibeis_cnn --tf netrun --db PZ_Master1 --acfg timectrl:pername=None --ensuredata
        python -m ibeis_cnn --tf netrun --db mnist --ensuredata --show
        python -m ibeis_cnn --tf netrun --db mnist --ensuredata --show --datatype=category
        python -m ibeis_cnn --tf netrun --db mnist --ensuredata --show --datatype=siam-patch

        python -m ibeis_cnn --tf netrun --db PZ_Master1 --acfg ctrl:pername=None,excluderef=False,contributor_contains=FlankHack --ensuredata --show --datatype=siam-part

        # Parts based datasets
        python -m ibeis_cnn --tf netrun --db PZ_MTEST --acfg ctrl --datatype=siam-part --ensuredata --show

        # Patch based dataset (big one)
        python -m ibeis_cnn --tf netrun --db PZ_Master1 --acfg default:is_known=True,qmin_pername=2,view=primary,species=primary,minqual=ok --ensuredata --show --vtd
        python -m ibeis_cnn --tf netrun --ds pzm4 --weights=new --arch=siaml2_128 --train --monitor
        python -m ibeis_cnn --tf netrun --ds pzm4 --arch=siaml2_128 --test
        python -m ibeis_cnn --tf netrun --ds pzm4 --arch=siaml2_128 --veryverbose --no-flask

        # --- TRAINING ---
        python -m ibeis_cnn --tf netrun --db PZ_Master1 --acfg default:is_known=True,qmin_pername=2,view=primary,species=primary,minqual=ok --weights=new --arch=siaml2_128 --train --monitor

        python -m ibeis_cnn --tf netrun --ds timectrl_pzmaster1 --acfg ctrl:pername=None,excluderef=False,contributor_contains=FlankHack --train --weights=new --arch=siaml2_128  --monitor  # NOQA
        python -m ibeis_cnn --tf netrun --ds timectrl_pzmaster1 --acfg ctrl:pername=None,excluderef=False --train --weights=new --arch=siaml2_128  --monitor  # NOQA
        python -m ibeis_cnn --tf netrun --ds pzmtest --weights=new --arch=siaml2_128 --train --monitor --DEBUG_AUGMENTATION
        python -m ibeis_cnn --tf netrun --ds pzmtest --weights=new --arch=siaml2_128 --train --monitor

        python -m ibeis_cnn --tf netrun --ds flankhack --weights=new --arch=siaml2_partmatch --train --monitor --learning_rate=.00001
        python -m ibeis_cnn --tf netrun --ds flankhack --weights=new --arch=siam_deepfaceish --train --monitor --learning_rate=.00001

        # Different ways to train mnist
        python -m ibeis_cnn --tf netrun --db mnist --weights=new --arch=mnist_siaml2 --train --monitor --datatype=siam-patch
        python -m ibeis_cnn --tf netrun --db mnist --weights=new --arch=mnist-category --train --monitor --datatype=category

        # --- INITIALIZED-TRAINING ---
        python -m ibeis_cnn --tf netrun --ds pzmtest --arch=siaml2_128 --weights=gz-gray:current --train --monitor

        # --- TESTING ---
        python -m ibeis_cnn --tf netrun --db liberty --weights=liberty:current --arch=siaml2_128 --test
        python -m ibeis_cnn --tf netrun --db PZ_Master0 --weights=combo:current --arch=siaml2_128 --testall

    Example:
        >>> # DISABLE_DOCTEST
        >>> from ibeis_cnn.netrun import *  # NOQA
        >>> netrun()
        >>> ut.show_if_requested()
    """
    ut.colorprint('[netrun] NET RUN', 'red')

    requests, hyperparams, tags = parse_args()
    ds_tag         = tags['ds_tag']
    datatype       = tags['datatype']
    extern_ds_tag  = tags['extern_ds_tag']
    arch_tag       = tags['arch_tag']
    checkpoint_tag = tags['checkpoint_tag']

    # ----------------------------
    # Choose the main dataset
    ut.colorprint('[netrun] Ensuring Dataset', 'yellow')
    dataset = ingest_data.grab_dataset(ds_tag, datatype)
    if extern_ds_tag is not None:
        extern_dpath = ingest_data.get_extern_training_dpath(extern_ds_tag)
    else:
        extern_dpath = None

    print('dataset.training_dpath = %r' % (dataset.training_dpath,))

    print('Dataset Alias Key: %r' % (dataset.alias_key,))
    print('Current Dataset Tag: %r' % (
        ut.invert_dict(DS_TAG_ALIAS2).get(dataset.alias_key, None),))

    if requests['ensuredata']:
        # Print alias key that maps to this particular dataset
        if ut.show_was_requested():
            interact_ = dataset.interact()  # NOQA
            return
        print('...exiting')
        sys.exit(1)

    # ----------------------------
    # Choose model architecture
    # TODO: data will need to return info about number of labels in viewpoint models
    # Specify model architecture
    ut.colorprint('[netrun] Architecture Specification', 'yellow')
    if arch_tag == 'siam2stream':
        model = models.SiameseCenterSurroundModel(
            data_shape=dataset.data_shape,
            training_dpath=dataset.training_dpath, **hyperparams)
    elif arch_tag.startswith('siam'):
        model = models.SiameseL2(
            data_shape=dataset.data_shape,
            arch_tag=arch_tag,
            training_dpath=dataset.training_dpath, **hyperparams)
    elif arch_tag == 'mnist-category':
        model = models.MNISTModel(
            data_shape=dataset.data_shape,
            output_dims=dataset.output_dims,
            arch_tag=arch_tag,
            training_dpath=dataset.training_dpath, **hyperparams)
        pass
    else:
        raise ValueError('Unknown arch_tag=%r' % (arch_tag,))

    ut.colorprint('[netrun] Initialize architecture', 'yellow')
    model.init_arch()

    # ----------------------------
    # Choose weight initialization
    ut.colorprint('[netrun] Setting weights', 'yellow')
    if checkpoint_tag == 'new':
        ut.colorprint('[netrun] * Initializing new weights', 'lightgray')
        model.reinit_weights()
    else:
        checkpoint_tag = model.resolve_fuzzy_checkpoint_pattern(
            checkpoint_tag, extern_dpath)
        ut.colorprint('[netrun] * Resolving weights checkpoint_tag=%r' %
                      (checkpoint_tag,), 'lightgray')
        if extern_dpath is not None:
            model.load_extern_weights(dpath=extern_dpath,
                                      checkpoint_tag=checkpoint_tag)
        elif model.has_saved_state(checkpoint_tag=checkpoint_tag):
            model.load_model_state(checkpoint_tag=checkpoint_tag)
        else:
            model_state_fpath = model.get_model_state_fpath(
                checkpoint_tag=checkpoint_tag)
            print('model_state_fpath = %r' % (model_state_fpath,))
            ut.checkpath(model_state_fpath, verbose=True)
            print('Known checkpoints are: ' + ut.repr3(model.list_saved_checkpoints()))
            raise ValueError(('Unresolved weight init: '
                              'checkpoint_tag=%r, extern_ds_tag=%r') % (
                                  checkpoint_tag, extern_ds_tag,))

    #print('Model State:')
    #print(model.get_state_str())
    # ----------------------------
    if not model.is_train_state_initialized():
        ut.colorprint('[netrun] Need to initialize training state', 'yellow')
        X_train, y_train = dataset.subset('train')
        model.ensure_data_params(X_train, y_train)

    # Run Actions
    if requests['train']:
        ut.colorprint('[netrun] Training Requested', 'yellow')
        # parse training arguments
        config = ut.argparse_dict(dict(
            era_size=15,
            max_epochs=1200,
            rate_decay=.8,
        ))
        model.monitor_config.update(**config)
        X_train, y_train = dataset.subset('train')
        X_valid, y_valid = dataset.subset('valid')
        model.fit(X_train, y_train, X_valid=X_valid, y_valid=y_valid)

    elif requests['test']:
        #assert model.best_results['epoch'] is not None
        ut.colorprint('[netrun] Test Requested', 'yellow')
        if requests['testall']:
            ut.colorprint('[netrun]  * Testing on all data', 'lightgray')
            X_test, y_test = dataset.subset('all')
            flat_metadata = dataset.subset_metadata('all')
        else:
            ut.colorprint('[netrun]  * Testing on test subset', 'lightgray')
            X_test, y_test = dataset.subset('test')
            flat_metadata = dataset.subset_metadata('test')
        data, labels = X_test, y_test
        dataname = dataset.alias_key
        experiments.test_siamese_performance(model, data, labels,
                                             flat_metadata, dataname)
    else:
        if not ut.get_argflag('--cmd'):
            raise ValueError('nothing here. need to train or test')

    if requests['publish']:
        ut.colorprint('[netrun] Publish Requested', 'yellow')
        publish_dpath = ut.truepath('~/Dropbox/IBEIS')
        published_model_state = ut.unixjoin(
            publish_dpath, model.arch_tag + '_model_state.pkl')
        ut.copy(model.get_model_state_fpath(), published_model_state)
        ut.view_directory(publish_dpath)
        print('You need to get the dropbox link and '
              'register it into the appropriate file')
        # pip install dropbox
        # https://www.dropbox.com/developers/core/start/python
        # import dropbox  # need oauth
        #client.share('/myfile.txt', short_url=False)
        # https://cthulhu.dyn.wildme.io/public/models/siaml2_128_model_state.pkl

    if ut.get_argflag('--cmd'):
        ut.embed()
Example #26
def validate_model(
    cuda_str,
    python_exe,
    test_py_path,
    config_py_path,
    results_path,
    backup_path,
    validate_with_accuracy,
    deploy_path,
    deploy,
    deploy_tag,
    cleanup,
    cleanup_all,
    bin_path,
    cfg_path,
    data_path,
    weights_path,
    cache_species_str,
):
    # This excerpt assumes the module-level imports shown below
    import subprocess
    import logging
    from os.path import exists, join
    import utool as ut

    logger = logging.getLogger(__name__)

    # Call testing
    # Example: CUDA_VISIBLE_DEVICES=X python bin/test.py -c -n cfg/yolo.py
    args = (
        cuda_str,
        python_exe,
        test_py_path,
        config_py_path,
        results_path,
        backup_path,
    )
    call_str = '%s%s %s -c -n %s --results %s %s/*' % args
    logger.info(call_str)
    subprocess.call(call_str, shell=True)
    assert exists(results_path)

    # Validate results
    with open(results_path, 'r') as results_file:
        line_list = results_file.readlines()

    if len(line_list) < 10:
        # Suspiciously short results file; drop into an interactive shell to debug
        logger.info('VALIDATION ERROR!')
        ut.embed()

    result_list = []
    for line in line_list:
        logger.info(line)
        line = line.strip().split(',')
        if len(line) != 3:
            continue
        model_path, loss, accuracy = line
        loss = float(loss)
        accuracy = float(accuracy)
        miss_rate = (100.0 - accuracy) / 100.0
        if validate_with_accuracy:
            result = (miss_rate, loss, model_path)
        else:
            result = (loss, miss_rate, model_path)
        logger.info('\t%r' % (result, ))
        result_list.append(result)
    result_list = sorted(result_list)

    best_result = result_list[0]
    best_model_filepath = best_result[-1]

    # Copy best model, delete the rest
    ut.ensuredir(deploy_path)
    deploy_model_filepath = join(deploy_path, 'detect.lightnet.weights')
    deploy_config_filepath = join(deploy_path, 'detect.lightnet.py')
    ut.copy(best_model_filepath, deploy_model_filepath)
    ut.copy(config_py_path, deploy_config_filepath)

    # Cleanup
    if cleanup:
        ut.delete(backup_path)
        ut.delete(results_path)

        if cleanup_all:
            ut.delete(bin_path)
            ut.delete(cfg_path)
            ut.delete(data_path)
            ut.delete(weights_path)

    # Deploy
    final_path = join('/', 'data', 'public', 'models')
    if deploy:
        assert exists(final_path), 'Cannot deploy the model on this machine'
        if deploy_tag is None:
            deploy_tag = cache_species_str

        counter = 0
        while True:
            final_config_prefix = 'detect.lightnet.%s.v%d' % (
                deploy_tag,
                counter,
            )
            final_config_filename = '%s.py' % (final_config_prefix, )
            final_config_filepath = join(final_path, final_config_filename)
            if not exists(final_config_filepath):
                break
            counter += 1

        final_model_filename = '%s.weights' % (final_config_prefix, )
        final_model_filepath = join(final_path, final_model_filename)

        assert not exists(final_model_filepath)
        assert not exists(final_config_filepath)

        ut.copy(deploy_model_filepath, final_model_filepath)
        ut.copy(deploy_config_filepath, final_config_filepath)

        retval = (
            final_model_filepath,
            final_config_filepath,
        )
    else:
        retval = (
            deploy_model_filepath,
            deploy_config_filepath,
        )

    return retval
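
Since result_list holds (primary_metric, secondary_metric, model_path) tuples, a plain sorted() ranks the checkpoints by the primary metric and breaks ties on the secondary one, so result_list[0] is the winner. A quick illustration with made-up numbers:

# validate_with_accuracy=True ranks by miss rate first, then by loss
result_list = [
    (0.12, 1.9, 'backup/weights-3000.pt'),
    (0.08, 2.4, 'backup/weights-6000.pt'),
    (0.08, 2.1, 'backup/weights-9000.pt'),
]
best_result = sorted(result_list)[0]
print(best_result[-1])  # 'backup/weights-9000.pt': lowest miss rate, then lowest loss
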
Example #30
0
def update_schema_version(ibs, db, schema_spec, version, version_target,
                          dobackup=True, clearbackup=False):
    """
    version_target = version_expected
    clearbackup = False
    FIXME: AN SQL HELPER FUNCTION SHOULD BE AGNOSTIC TO CONTROLLER OBJECTS
    """
    def _check_superkeys():
        all_tablename_list = db.get_table_names()
        # always ignore the metadata table.
        ignore_tables_ = ['metadata']
        tablename_list = [tablename for tablename in all_tablename_list
                          if tablename not in ignore_tables_]
        for tablename in tablename_list:
            superkey_colnames_list = db.get_table_superkey_colnames(tablename)
            # some tables seem to only have old constraints and aren't
            # properly updated to superkeys... weird.
            old_constraints = db.get_table_constraints(tablename)
            assert len(superkey_colnames_list) > 0 or len(old_constraints) > 0, (
                'ERROR UPDATING DATABASE, SUPERKEYS of %s DROPPED!' % (tablename,))

    print('[_SQL] update_schema_version')
    db_fpath = db.fpath
    if dobackup:
        db_dpath, db_fname = split(db_fpath)
        db_fname_noext, ext = splitext(db_fname)
        db_backup_fname = ''.join((db_fname_noext, '_backup', '_v', version, ext))
        db_backup_fpath = join(db_dpath, db_backup_fname)
        count = 0
        # TODO MAKE UTOOL THAT DOES THIS (there might be one in util_logging)
        while ut.checkpath(db_backup_fpath, verbose=True):
            db_backup_fname = ''.join((db_fname_noext, '_backup', '_v',
                                       version, '_copy', str(count), ext))
            db_backup_fpath = join(db_dpath, db_backup_fname)
            count += 1
        ut.copy(db_fpath, db_backup_fpath)

    legacy_update_funcs = schema_spec.LEGACY_UPDATE_FUNCTIONS
    for legacy_version, func in legacy_update_funcs:
        if compare_string_versions(version, legacy_version) == -1:
            func(db)
    db_versions = schema_spec.VALID_VERSIONS
    # Python 3's sorted() takes no cmp argument; adapt the comparator with cmp_to_key
    from functools import cmp_to_key
    valid_versions = sorted(db_versions.keys(), key=cmp_to_key(compare_string_versions))
    try:
        start_index = valid_versions.index(version) + 1
    except ValueError:
        # list.index raises ValueError (not IndexError) when the item is missing
        raise AssertionError('[!update_schema_version]'
                             ' The current database version is unknown')
    try:
        end_index = valid_versions.index(version_target) + 1
    except ValueError:
        raise AssertionError('[!update_schema_version]'
                             ' The target database version is unknown')

    try:
        print('Update path: %r ' % (valid_versions[start_index:end_index]))
        for index in range(start_index, end_index):
            next_version = valid_versions[index]
            print('Updating database to version: %r' % (next_version))
            pre, update, post = db_versions[next_version]
            if pre is not None:
                pre(db, ibs=ibs)
            if update is not None:
                update(db, ibs=ibs)
            if post is not None:
                post(db, ibs=ibs)
            _check_superkeys()
    except Exception as ex:
        if dobackup:
            msg = 'The database update failed, rolled back to the original version.'
            ut.printex(ex, msg, iswarning=True)
            ut.remove_file(db_fpath)
            ut.copy(db_backup_fpath, db_fpath)
            if clearbackup:
                ut.remove_file(db_backup_fpath)
            raise
        else:
            ut.printex(ex, (
                'The database update failed, and no backup was made.'),
                iswarning=False)
            raise
    if dobackup and clearbackup:
        ut.remove_file(db_backup_fpath)
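
compare_string_versions is used above as an old-style cmp comparator, but its definition is not part of this excerpt. A minimal sketch of a plausible implementation that compares dotted version strings numerically (the real ibeis helper may differ):

def compare_string_versions(ver1, ver2):
    # cmp-style: -1 if ver1 < ver2, 0 if equal, +1 if ver1 > ver2
    tup1 = tuple(int(part) for part in ver1.split('.'))
    tup2 = tuple(int(part) for part in ver2.split('.'))
    return (tup1 > tup2) - (tup1 < tup2)

assert compare_string_versions('1.9.0', '1.10.0') == -1
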
Example #31
0
def localizer_lightnet_train(ibs,
                             species_list,
                             cuda_device='0',
                             batches=60000,
                             validate_with_accuracy=True,
                             deploy_tag=None,
                             cleanup=True,
                             cleanup_all=True,
                             deploy=True,
                             **kwargs):
    from ibeis.algo.detect import lightnet
    from os.path import exists, join
    import utool as ut
    import subprocess
    import datetime
    import sys

    assert species_list is not None
    species_list = sorted(species_list)

    lightnet_training_kit_url = lightnet._download_training_kit()
    _localizer_lightnet_validate_training_kit(lightnet_training_kit_url)

    hashstr = ut.random_nonce()[:16]
    cache_species_str = '-'.join(species_list)

    cache_path = join(ibs.cachedir, 'training', 'lightnet')
    ut.ensuredir(cache_path)
    training_instance_folder = 'lightnet-training-%s-%s' % (
        cache_species_str,
        hashstr,
    )
    training_instance_path = join(cache_path, training_instance_folder)
    ut.copy(lightnet_training_kit_url, training_instance_path)

    backup_path = join(training_instance_path, 'backup')
    bin_path = join(training_instance_path, 'bin')
    cfg_path = join(training_instance_path, 'cfg')
    data_path = join(training_instance_path, 'data')
    deploy_path = join(training_instance_path, 'deploy')
    weights_path = join(training_instance_path, 'darknet19_448.conv.23.pt')
    results_path = join(training_instance_path, 'results.txt')
    dataset_py_path = join(bin_path, 'dataset.template.py')
    labels_py_path = join(bin_path, 'labels.template.py')
    test_py_path = join(bin_path, 'test.template.py')
    train_py_path = join(bin_path, 'train.template.py')
    config_py_path = join(cfg_path, 'yolo.template.py')

    ibs.export_to_xml(species_list=species_list,
                      output_path=data_path,
                      **kwargs)

    species_str_list = ['%r' % (species, ) for species in species_list]
    species_str = ', '.join(species_str_list)
    replace_dict = {
        '_^_YEAR_^_': str(datetime.datetime.now().year),
        '_^_DATA_ROOT_^_': data_path,
        '_^_SPECIES_MAPPING_^_': species_str,
        '_^_NUM_BATCHES_^_': str(batches),
    }

    dataset_py_path = _localizer_lightnet_template_replace(
        dataset_py_path, replace_dict)
    labels_py_path = _localizer_lightnet_template_replace(
        labels_py_path, replace_dict)
    test_py_path = _localizer_lightnet_template_replace(
        test_py_path, replace_dict)
    train_py_path = _localizer_lightnet_template_replace(
        train_py_path, replace_dict)
    config_py_path = _localizer_lightnet_template_replace(
        config_py_path, replace_dict)
    assert exists(dataset_py_path)
    assert exists(labels_py_path)
    assert exists(test_py_path)
    assert exists(train_py_path)
    assert exists(config_py_path)
    assert not exists(backup_path)
    assert not exists(results_path)

    python_exe = sys.executable
    if cuda_device in [-1, None] or len(cuda_device) == 0:
        cuda_str = ''
    else:
        cuda_str = 'CUDA_VISIBLE_DEVICES=%s ' % (cuda_device, )

    # Call labels
    call_str = '%s %s' % (
        python_exe,
        labels_py_path,
    )
    print(call_str)
    subprocess.call(call_str, shell=True)

    # Call training
    # Example: CUDA_VISIBLE_DEVICES=0 python bin/train.py -c -n cfg/yolo.py -c darknet19_448.conv.23.pt
    args = (cuda_str, python_exe, train_py_path, config_py_path, backup_path,
            weights_path)
    call_str = '%s%s %s -c -n %s -b %s %s' % args
    print(call_str)
    subprocess.call(call_str, shell=True)
    assert exists(backup_path)

    # Call testing
    # Example: CUDA_VISIBLE_DEVICES=0 python bin/test.py -c -n cfg/yolo.py
    args = (
        cuda_str,
        python_exe,
        test_py_path,
        config_py_path,
        results_path,
        backup_path,
    )
    call_str = '%s%s %s -c -n %s --results %s %s/*' % args
    print(call_str)
    subprocess.call(call_str, shell=True)
    assert exists(results_path)

    # Validate results
    with open(results_path, 'r') as results_file:
        line_list = results_file.readlines()

    result_list = []
    for line in line_list:
        line = line.strip().split(',')
        if len(line) != 3:
            continue
        model_path, loss, accuracy = line
        loss = float(loss)
        accuracy = float(accuracy)
        miss_rate = (100.0 - accuracy) / 100.0
        if validate_with_accuracy:
            result = (miss_rate, loss, model_path)
        else:
            result = (loss, miss_rate, model_path)
        result_list.append(result)
    result_list = sorted(result_list)

    best_result = result_list[0]
    best_model_filepath = best_result[-1]

    # Copy best model, delete the rest
    ut.ensuredir(deploy_path)
    deploy_model_filepath = join(deploy_path, 'detect.lightnet.weights')
    deploy_config_filepath = join(deploy_path, 'detect.lightnet.py')
    ut.copy(best_model_filepath, deploy_model_filepath)
    ut.copy(config_py_path, deploy_config_filepath)

    # Cleanup
    if cleanup:
        ut.delete(backup_path)
        ut.delete(results_path)

        if cleanup_all:
            ut.delete(bin_path)
            ut.delete(cfg_path)
            ut.delete(data_path)
            ut.delete(weights_path)

    # Deploy
    final_path = join('/', 'data', 'public', 'models')
    if deploy:
        assert exists(final_path), 'Cannot deploy the model on this machine'
        if deploy_tag is None:
            deploy_tag = cache_species_str

        counter = 0
        while True:
            final_config_prefix = 'detect.lightnet.%s.v%d' % (
                deploy_tag,
                counter,
            )
            final_config_filename = '%s.py' % (final_config_prefix, )
            final_config_filepath = join(final_path, final_config_filename)
            if not exists(final_config_filepath):
                break
            counter += 1

        final_model_filename = '%s.weights' % (final_config_prefix, )
        final_model_filepath = join(final_path, final_model_filename)

        assert not exists(final_model_filepath)
        assert not exists(final_config_filepath)

        ut.copy(deploy_model_filepath, final_model_filepath)
        ut.copy(deploy_config_filepath, final_config_filepath)

        retval = (
            final_model_filepath,
            final_config_filepath,
        )
    else:
        retval = (
            deploy_model_filepath,
            deploy_config_filepath,
        )

    return retval
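
_localizer_lightnet_template_replace is called above but not shown. Judging by the call sites, it fills the _^_KEY_^_ markers and writes a concrete file next to the template (so cfg/yolo.template.py becomes cfg/yolo.py, matching the CUDA_VISIBLE_DEVICES example comments). A minimal sketch under those assumptions; the real helper may differ:

def _localizer_lightnet_template_replace(template_filepath, replace_dict):
    # Hypothetical reconstruction of the template-filling helper
    with open(template_filepath, 'r') as template_file:
        contents = template_file.read()
    for search, replace in replace_dict.items():
        contents = contents.replace(search, replace)
    # Drop the '.template' infix to get the output filename
    output_filepath = template_filepath.replace('.template', '')
    with open(output_filepath, 'w') as output_file:
        output_file.write(contents)
    return output_filepath
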
Example #32
0
def classifier_multiclass_densenet_train(ibs,
                                         gid_list,
                                         label_list,
                                         ensembles=3,
                                         **kwargs):
    """
    >>> import uuid
    >>> manifest_filepath = join(ibs.dbdir, 'flukebook_groundtruth.csv')
    >>> with open(manifest_filepath, 'r') as manifest_file:
    >>>     line_list = manifest_file.readlines()
    >>>
    >>> label_dict = {
    >>>     'Left Dorsal Fin'  : 'left_dorsal_fin',
    >>>     'Right Dorsal Fin' : 'right_dorsal_fin',
    >>>     'Tail Fluke'       : 'tail_fluke',
    >>> }
    >>>
    >>> uuid_list = []
    >>> label_list = []
    >>> for line in line_list:
    >>>     line = line.strip().split(',')
    >>>     assert len(line) == 2
    >>>     uuid_, label_ = line
    >>>     uuid_ = uuid.UUID(uuid_)
    >>>     label_ = label_.strip()
    >>>     print(uuid_, label_)
    >>>     uuid_list.append(uuid_)
    >>>     label_ = label_dict.get(label_, None)
    >>>     assert label_ is not None
    >>>     label_list.append(label_)
    >>>
    >>> gid_list = ibs.get_image_gids_from_uuid(uuid_list)
    >>> assert None not in gid_list
    >>> # archive_path = ibs.classifier_multiclass_densenet_train(gid_list, label_list)
    >>> ibs.classifier2_precision_recall_algo_display(test_gid_list=gid_list, test_label_list=label_list)
    """
    from ibeis_cnn.ingest_ibeis import get_cnn_classifier_multiclass_training_images_pytorch
    from ibeis.algo.detect import densenet
    from os.path import exists, join
    import utool as ut

    data_path = join(ibs.get_cachedir(), 'extracted-classifier-multiclass')
    extracted_path = get_cnn_classifier_multiclass_training_images_pytorch(
        ibs,
        gid_list,
        label_list,
        dest_path=data_path,
        image_size=densenet.INPUT_SIZE,
        **kwargs)

    weights_path_list = []
    for ensemble_num in range(ensembles):
        args = (ensemble_num, )
        output_path = join(ibs.get_cachedir(), 'training',
                           'classifier-multiclass-ensemble-%d' % args)
        weights_path = densenet.train(extracted_path,
                                      output_path,
                                      blur=True,
                                      flip=False)
        weights_path_list.append(weights_path)

    archive_name = 'classifier.multiclass.zip'
    archive_path = join(ibs.get_cachedir(), 'training', archive_name)
    ensemble_weights_path_list = []

    for index, weights_path in enumerate(sorted(weights_path_list)):
        assert exists(weights_path)
        ensemble_weights_path = 'classifier.multiclass.%d.weights' % (index, )
        # NOTE: the relative path means the copy lands in the current working directory
        ut.copy(weights_path, ensemble_weights_path)
        ensemble_weights_path_list.append(ensemble_weights_path)

    ut.archive_files(archive_path,
                     ensemble_weights_path_list,
                     overwrite=True,
                     common_prefix=True)

    return archive_path
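
A quick way to sanity-check the archive returned above is to list its members with the standard library; the names should follow the classifier.multiclass.N.weights pattern used when the ensemble was copied:

import zipfile

# archive_path as returned by classifier_multiclass_densenet_train
with zipfile.ZipFile('classifier.multiclass.zip') as archive:
    print(archive.namelist())  # e.g. ['classifier.multiclass.0.weights', ...]
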
Example #33
0
def install_wildbook(verbose=ut.NOT_QUIET):
    """
    Script to setup wildbook on a unix based system
    (hopefully eventually this will generalize to win32)

    CommandLine:
        # Reset
        python -m ibeis --tf reset_local_wildbook
        # Setup
        python -m ibeis --tf install_wildbook
        # Startup
        python -m ibeis --tf startup_wildbook_server --show --exec-mode

        # Reset
        python -m ibeis.control.manual_wildbook_funcs --test-reset_local_wildbook
        # Setup
        python -m ibeis.control.manual_wildbook_funcs --test-install_wildbook
        # Startup
        python -m ibeis.control.manual_wildbook_funcs --test-startup_wildbook_server --show --exec-mode


    Example:
        >>> # SCRIPT
        >>> from ibeis.control.manual_wildbook_funcs import *  # NOQA
        >>> verbose = True
        >>> result = install_wildbook()
        >>> print(result)
    """
    # TODO: allow custom specified tomcat directory
    from os.path import basename, splitext, join
    import time
    import re
    import subprocess
    import utool as ut
    try:
        output = subprocess.check_output(['java', '-version'],
                                         stderr=subprocess.STDOUT)
        # check_output returns bytes on Python 3
        _java_version = output.decode('utf-8').split('\n')[0]
        _java_version = _java_version.replace('java version ', '')
        java_version = _java_version.replace('"', '')
        print('java_version = %r' % (java_version,))
        if not java_version.startswith('1.7'):
            print('Warning: wildbook is only supported on java 1.7')
    except OSError:
        output = None
    if output is None:
        raise ImportError(
            'Cannot find java on this machine. '
            'Please install java: http://www.java.com/en/download/')

    tomcat_dpath = find_or_download_tomcat()
    assert tomcat_dpath is not None, 'Could not find tomcat'
    war_fpath = find_or_download_wilbook_warfile()
    war_fname = basename(war_fpath)
    wb_target = splitext(war_fname)[0]

    # Ensure environment variables
    #os.environ['JAVA_HOME'] = find_java_jvm()
    #os.environ['TOMCAT_HOME'] = tomcat_dpath
    #os.environ['CATALINA_HOME'] = tomcat_dpath

    # Move the war file to tomcat webapps if not there
    webapps_dpath = join(tomcat_dpath, 'webapps')
    deploy_war_fpath = join(webapps_dpath, war_fname)
    if not ut.checkpath(deploy_war_fpath, verbose=verbose):
        ut.copy(war_fpath, deploy_war_fpath)

    # Ensure that the war file has been unpacked

    unpacked_war_dpath = join(webapps_dpath, wb_target)
    if not ut.checkpath(unpacked_war_dpath, verbose=verbose):
        # Need to make sure you start catalina in the same directory otherwise
        # the derby database gets put in the cwd
        tomcat_startup_dir = get_tomcat_startup_tmpdir()
        with ut.ChdirContext(tomcat_startup_dir):
            # Starting and stopping catalina should be sufficient to unpack the
            # war
            startup_fpath  = join(tomcat_dpath, 'bin', 'startup.sh')
            shutdown_fpath = join(tomcat_dpath, 'bin', 'shutdown.sh')
            ut.cmd(ut.quote_single_command(startup_fpath))
            print('It is NOT ok if the startup.sh fails\n')

            # wait for the war to be unpacked
            for retry_count in range(0, 6):
                time.sleep(1)
                if ut.checkpath(unpacked_war_dpath, verbose=True):
                    break
                else:
                    print('Retrying')

            # ensure that the server is running
            import requests
            print('Checking if we can ping the server')
            response = requests.get('http://localhost:8080')
            if response is None or response.status_code != 200:
                print('There may be an error starting the server')
            else:
                print('Seem able to ping the server')

            # assert that the war was unpacked
            ut.assertpath(unpacked_war_dpath, (
                'Wildbook war might have not unpacked correctly.  This may '
                'be ok. Try again. If it fails a second time, then there is a '
                'problem.'), verbose=True)

            # shutdown the server
            ut.cmd(ut.quote_single_command(shutdown_fpath))
            print('It is ok if the shutdown.sh fails')
            time.sleep(.5)

    # Make sure permissions are correctly set in wildbook
    # Comment out the line that requires authentication
    permission_fpath = join(unpacked_war_dpath, 'WEB-INF/web.xml')
    ut.assertpath(permission_fpath)
    permission_text = ut.readfrom(permission_fpath)
    lines_to_remove = [
        '/EncounterSetMarkedIndividual = authc, roles[admin]'
    ]
    new_permission_text = permission_text[:]
    for line in lines_to_remove:
        prefix = ut.named_field('prefix', '\\s*')
        suffix = ut.named_field('suffix', '\\s*\n')
        pattern = ('^' + prefix + re.escape(line) + suffix)
        match = re.search(pattern, new_permission_text,
                          flags=re.MULTILINE | re.DOTALL)
        if match is None:
            continue
        newline = '<!--%s -->' % (line,)
        repl = ut.bref_field('prefix') + newline + ut.bref_field('suffix')
        # Substitute into the accumulated text so earlier removals are preserved
        before = new_permission_text
        new_permission_text = re.sub(pattern, repl, new_permission_text,
                                     flags=re.MULTILINE | re.DOTALL)
        assert new_permission_text != before, 'text should have changed'
    if new_permission_text != permission_text:
        print('Need to write new permission texts')
        ut.writeto(permission_fpath, new_permission_text)
    else:
        print('Permission file seems to be ok')

    print('Wildbook is installed and waiting to be started')
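
The named_field/bref_field helpers above just build named groups and backreferences, so the same comment-out transform can be written with plain re. A standalone illustration on a made-up one-line sample:

import re

line = '/EncounterSetMarkedIndividual = authc, roles[admin]'
text = '    /EncounterSetMarkedIndividual = authc, roles[admin]\n'
pattern = '^(?P<prefix>\\s*)' + re.escape(line) + '(?P<suffix>\\s*\n)'
repl = '\\g<prefix>' + '<!--%s -->' % (line,) + '\\g<suffix>'
print(re.sub(pattern, repl, text, flags=re.MULTILINE | re.DOTALL))
# ->     <!--/EncounterSetMarkedIndividual = authc, roles[admin] -->
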
Example #34
0
#        '-dNOPAUSE',
#        '-dQUIET',
#        '-dBATCH',
#        '-sOutputFile=' + output_pdf_fpath,
#        pdf_fpath
#    )
#    ut.cmd(*cmd_list)
#    return output_pdf_fpath

if __name__ == '__main__':
    """
    CommandLine:
        ./compress_latex.py
    """
    import sys
    from os.path import abspath, basename, dirname, join
    import utool as ut
    if len(sys.argv) == 1:
        abs_file = abspath(__file__)

        pdf_fpath = join(dirname(abs_file), 'main.pdf')
        output_fname = basename(dirname(abs_file))
        import re
        # Use a raw string; '\d' in a plain string is a DeprecationWarning on Python 3
        output_fname = re.sub(r'\d', '', output_fname).strip('-').strip('_')
    else:
        pdf_fpath = sys.argv[1]
        output_fname = None
    output_pdf_fpath = ut.compress_pdf(pdf_fpath, output_fname=output_fname)

    PUBLISH = True
    if PUBLISH:
        publish_path = ut.truepath('~/Dropbox/crall')
        ut.copy(output_pdf_fpath, publish_path)
Example #35
0
        version = 'valid'
        output_path_folders = output_path_valid_folders
        output_path_manifest = output_path_valid_manifest

    # if aid in test_aid_set:
    #     assert aid not in valid_aid_set
    #     version = 'test'
    #     output_path_folders = output_path_test_folders
    #     output_path_manifest = output_path_test_manifest

    name_output_path = join(output_path_folders, name_text)
    if not exists(name_output_path):
        ut.ensuredir(name_output_path)
    annot_output_filepath = join(name_output_path, '%s.jpg' % (uuid_str, ))
    assert not exists(annot_output_filepath)
    ut.copy(chip_filepath, annot_output_filepath, verbose=False)

    annot_output_filename = '%s.jpg' % (uuid_str, )
    annot_output_filepath = join(output_path_manifest, annot_output_filename)
    assert not exists(annot_output_filepath)
    ut.copy(chip_filepath, annot_output_filepath, verbose=False)
    manifest_line = '%s,%s,%s' % (
        annot_output_filename,
        name_text,
        named_humpback_unixtime,
    )
    manifest_dict[version].append(manifest_line)

for manifest_key in manifest_dict:
    manifest_list = manifest_dict[manifest_key]
    manifest_list = sorted(manifest_list)