def get_zotero_path():
    """
    Return the path to the zotero data directory on known machines.

    Returns:
        str: expanded path to the zotero profile data directory

    Raises:
        AssertionError: when run on an unrecognized machine
    """
    from os.path import expanduser
    computer_name = ut.get_computer_name().lower()
    # BUGFIX: the original had no else branch, so any machine other than
    # 'hyrule'/'ooo' raised an obscure NameError on the return statement.
    # Both known machines used the identical path, so the branches merge.
    if computer_name in ('hyrule', 'ooo'):
        zotero_fpath = expanduser('~/.zotero/zotero/a4dxx4ff.default/zotero')
        # Windows profile location kept for reference:
        # zotero_fpath = expanduser('~/AppData/Roaming/Zotero/Zotero/Profiles/xrmkwlkz.default/zotero')
    else:
        raise AssertionError('get_zotero_path only works on known machines')
    return zotero_fpath
Example #2
0
def get_big_imgdir(workdir):
    """
    Return a machine-specific directory expected to contain many images.

    this probably will only work on jon's machines

    Args:
        workdir (str): base working directory used for relative image paths

    Returns:
        str: path to a large image directory

    Raises:
        AssertionError: on unrecognized machines
    """
    machine = utool.get_computer_name()
    if machine == 'BakerStreet':
        return r'D:\data\raw\Animals\Grevys\gz_mpala_cropped\images'
    if machine == 'Hyrule':
        return join(workdir, 'GZ_Cropped/images')
    if machine == 'Ooo':
        return join(workdir, 'FROG_tufts/images')
    raise AssertionError('this test only works on Jons computers')
Example #3
0
def get_big_imgdir(workdir):
    """
    Get a place where a lot of images are.
    this probably will only work on jon's machines

    Args:
        workdir (str): base working directory used for relative image paths

    Returns:
        str: path to a directory expected to contain many images

    Raises:
        AssertionError: when run on an unrecognized machine
    """
    if utool.get_computer_name() == 'BakerStreet':
        # Windows machine: absolute raw-string path
        imgdir = r'D:\data\raw\Animals\Grevys\gz_mpala_cropped\images'
    elif utool.get_computer_name() == 'Hyrule':
        imgdir = join(workdir, 'GZ_Cropped/images')
    elif utool.get_computer_name() == 'Ooo':
        imgdir = join(workdir, 'FROG_tufts/images')
    else:
        raise AssertionError('this test only works on Jons computers')
    return imgdir
Example #4
0
    def _setup_links(self, cfg_prefix, config=None):
        """
        Called only when setting up an experiment to make a measurement.

        Creates symlinks such that all data is written to a directory that
        depends on a computer name, cfg_prefix and an arbitrary configuration
        dict.

        Then force the link in the basic directory to point to abs_dpath.

        Args:
            cfg_prefix (str): prefix combined with a short config hash to
                name the output directory
            config: configuration object or dict; its repr is hashed into
                the directory name and written to info.txt

        Side effects:
            sets ``self.dname``, ``self.dpath``, ``self.abs_dpath`` and
            ``self.link``; creates directories and symlinks on disk.
        """
        # Setup directory
        from os.path import expanduser
        assert self.dname is not None

        # The machine identity can be overridden from the command line
        computer_id = ut.get_argval('--comp', default=ut.get_computer_name())

        conf_dpath = ut.ensuredir((expanduser(self.base_dpath), 'configured'))
        comp_dpath = ut.ensuredir((join(conf_dpath, computer_id)))

        link_dpath = ut.ensuredir((self.base_dpath, 'link'))

        # if True:
        #     # move to new system
        #     old_dpath = join(conf_dpath, self.dbname + '_' + computer_id)
        #     if exists(old_dpath):
        #         ut.move(old_dpath, join(comp_dpath, self.dbname))

        # Prefer the recursive state dict when the config object supports it
        try:
            cfgstr = ut.repr3(config.getstate_todict_recursive())
        except AttributeError:
            cfgstr = ut.repr3(config)

        # Six hex chars keeps directory names short but (mostly) unique
        hashid = ut.hash_data(cfgstr)[0:6]
        suffix = '_'.join([cfg_prefix, hashid])
        dbcode = self.dbname + '_' + suffix

        abs_dpath = ut.ensuredir(join(comp_dpath, dbcode))

        self.dname = dbcode
        self.dpath = abs_dpath
        self.abs_dpath = abs_dpath

        # Place a basic link in the base link directory
        links = []
        links.append(expanduser(join(link_dpath, self.dbname)))
        # # Make a configured but computer agnostic link
        # links.append(expanduser(join(conf_dpath, self.dbname)))

        for link in links:
            try:
                # Overwrite any existing link so the most recently used is
                # the default
                self.link = ut.symlink(abs_dpath, link, overwrite=True)
            except Exception:
                # Fallback: move whatever blocks the link aside, then retry
                if exists(abs_dpath):
                    newpath = ut.non_existing_path(abs_dpath, suffix='_old')
                    ut.move(link, newpath)
                    self.link = ut.symlink(abs_dpath, link)

        # Record the exact configuration used for this measurement
        ut.writeto(join(abs_dpath, 'info.txt'), cfgstr)
Example #5
0
def add_new_temp_contributor(ibs, user_prompt=False, offset=None, autolocate=False):
    r"""
    Add a temporary contributor row derived from machine/user/db identity.

    Args:
        ibs: controller providing get_dbname/get_dbdir/add_contributors
        user_prompt (bool): if True, interactively ask for overrides
        offset (int or None): optional disambiguating component for the tag
        autolocate (bool): if True, attempt automatic geo-location

    Returns:
        int: contrib_rowid of the added contributor

    RESTful:
        Method: POST
        URL:    /api/contributor/new_temp/
    """
    name_first = ibs.get_dbname()
    name_last = ut.get_computer_name() + ':' + ut.get_user_name() + ':' + ibs.get_dbdir()
    print('[collect_transfer_data] Contributor default first name: %s' % (name_first, ))
    print('[collect_transfer_data] Contributor default last name:  %s' % (name_last, ))
    if user_prompt:
        # BUGFIX: pressing Enter previously replaced the default with '';
        # `or` keeps the advertised default on empty input
        name_first = input('\n[collect_transfer_data] Change first name (Enter to use default): ') or name_first
        name_last  = input('\n[collect_transfer_data] Change last name (Enter to use default): ') or name_last

    if autolocate:
        success, location_city, location_state, location_country, location_zip = ut.geo_locate()
    else:
        success = False

    if success:
        print('\n[collect_transfer_data] Your location was determined automatically.')
        print('[collect_transfer_data] Contributor default city: %s'    % (location_city, ))
        print('[collect_transfer_data] Contributor default state: %s'   % (location_state, ))
        # BUGFIX: zip and country values were swapped in these two messages
        print('[collect_transfer_data] Contributor default zip: %s'     % (location_zip, ))
        print('[collect_transfer_data] Contributor default country: %s' % (location_country, ))
        if user_prompt:
            # BUGFIX: keep the auto-located defaults on empty input
            location_city    = input('\n[collect_transfer_data] Change default location city (Enter to use default): ') or location_city
            location_state   = input('\n[collect_transfer_data] Change default location state (Enter to use default): ') or location_state
            location_zip     = input('\n[collect_transfer_data] Change default location zip (Enter to use default): ') or location_zip
            location_country = input('\n[collect_transfer_data] Change default location country (Enter to use default): ') or location_country
    else:
        if user_prompt:
            print('\n')
        print('[collect_transfer_data] Your location could not be determined automatically.')
        if user_prompt:
            location_city    = input('[collect_transfer_data] Enter your location city (Enter to skip): ')
            location_state   = input('[collect_transfer_data] Enter your location state (Enter to skip): ')
            location_zip     = input('[collect_transfer_data] Enter your location zip (Enter to skip): ')
            location_country = input('[collect_transfer_data] Enter your location country (Enter to skip): ')
        else:
            location_city    = ''
            location_state   = ''
            location_zip     = ''
            location_country = ''

    #tag = '::'.join([name_first, name_last, location_city, location_state, location_zip, location_country])
    tag_components = [name_first, name_last, location_city, location_state, location_zip, location_country]
    if offset is not None:
        tag_components += [str(offset)]
    # Semicolons would break downstream tag parsing; escape them
    tag_components_clean = [comp.replace(';', '<semi>') for comp in tag_components]
    tag = ','.join(tag_components_clean)
    contrib_rowid = ibs.add_contributors(
        [tag], name_first_list=[name_first],
        name_last_list=[name_last], loc_city_list=[location_city],
        loc_state_list=[location_state], loc_country_list=[location_country],
        loc_zip_list=[location_zip])[0]
    return contrib_rowid
Example #6
0
 def __setstate__(ibs, state):
     """
     Pickle support: reopen the database referenced by ``state['dbdir']``
     and adopt its state.

     Raises:
         AssertionError: when unpickling on a different machine than the one
             that pickled, unless the dbdir exists locally (then only warns).
     """
     # Hack to allow for ibeis objects to be pickled
     import ibeis
     dbdir = state['dbdir']
     machine_name = state.pop('machine_name')
     try:
         # NOTE(review): 'picked' in the message is a typo for 'pickled' in
         # the original runtime string; left unchanged here.
         assert machine_name == ut.get_computer_name(), (
             'ibeis objects can only be picked and unpickled on the same machine')
     except AssertionError as ex:
         # Downgrade to a warning if the database path exists on this machine
         iswarning = ut.checkpath(dbdir)
         ut.printex(ex, iswarning=iswarning)
         if not iswarning:
             raise
     # Re-open the database and copy its entire instance state wholesale
     ibs2 = ibeis.opendb(dbdir=dbdir, web=False)
     ibs.__dict__.update(**ibs2.__dict__)
Example #7
0
 def __getstate__(ibs):
     """
     Pickle support: capture only the db directory and the machine name.

     Example:
         >>> # ENABLE_DOCTEST
         >>> import wbia
         >>> from six.moves import cPickle as pickle
         >>> ibs = wbia.opendb('testdb1')
         >>> ibs_dump = pickle.dumps(ibs)
         >>> ibs2 = pickle.loads(ibs_dump)
     """
     # Hack to allow for wbia objects to be pickled: the unpickler reopens
     # the database from dbdir, so only these two fields are needed.
     dbdir = ibs.get_dbdir()
     machine_name = ut.get_computer_name()
     state = dict(dbdir=dbdir, machine_name=machine_name)
     return state
Example #8
0
def get_avail_geom(monitor_num=None, percent_w=1.0, percent_h=1.0):
    """
    Return the usable geometry of a monitor as (startx, starty, w, h).

    Args:
        monitor_num (int or None): monitor index; defaults per-machine
        percent_w (float): fraction of the available width to report
        percent_h (float): fraction of the available height to report

    Returns:
        tuple: (startx, starty, availw * percent_w, usable_h * percent_h)
    """
    stdpxls = get_stdpxls()
    if monitor_num is None:
        # Jon's machine 'Ooo' prefers the second monitor by default
        if ut.get_computer_name() == 'Ooo':
            monitor_num = 1
        else:
            monitor_num = 0
    monitor_geometries = get_monitor_geometries()
    try:
        (startx, starty, availw, availh) = monitor_geometries[monitor_num]
    except KeyError:
        # BUGFIX: iterator.next() is Python-2-only; use the next() builtin,
        # which works on both Python 2 and 3. Falls back to any monitor.
        (startx, starty, availw, availh) = next(six.itervalues(monitor_geometries))
    available_geom = (startx,
                      starty,
                      availw * percent_w,
                      # subtract the OS border/taskbar height before scaling
                      (availh - stdpxls['os_border_h']) * percent_h)
    return available_geom
Example #9
0
def set_annot_pair_as_reviewed(ibs, aid1, aid2):
    """
    Denote that this match was reviewed and keep whatever status it is given.

    Args:
        ibs: controller object
        aid1 (int): first annotation rowid
        aid2 (int): second annotation rowid
    """
    isunknown1, isunknown2 = ibs.is_aid_unknown([aid1, aid2])
    if isunknown1 or isunknown2:
        truth = ibs.const.EVIDENCE_DECISION.UNKNOWN
    else:
        # Same name rowid means the annotations match
        nid1, nid2 = ibs.get_annot_name_rowids((aid1, aid2))
        truth = (ibs.const.EVIDENCE_DECISION.POSITIVE if
                 (nid1 == nid2) else ibs.const.EVIDENCE_DECISION.NEGATIVE)

    # Ensure a row exists for this pair
    annotmatch_rowids = ibs.add_annotmatch_undirected([aid1], [aid2])

    # Old functionality, remove. Reviewing should not set truth
    confidence = ibs.const.CONFIDENCE.CODE_TO_INT['guessing']  # NOQA
    # NOTE(review): `confidence` is computed but unused; the original line
    # recording the reviewer was corrupted in this source, so the call below
    # is a reconstruction of the apparent intent — verify against upstream.
    ibs.set_annotmatch_evidence_decision(annotmatch_rowids, [truth])
    user_id = ut.get_user_name() + '@' + ut.get_computer_name()
    ibs.set_annotmatch_reviewer(annotmatch_rowids, ['user:' + user_id])
    print('... set truth=%r' % (truth, ))
Example #10
0
    python -m ibeis update_wildbook_install_config

"""
from __future__ import absolute_import, division, print_function
import utool as ut
import subprocess
import re
import time
import os
from os.path import dirname, join, basename, splitext
# Inject utool's print/reload/profile helpers into this module's namespace
print, rrr, profile = ut.inject2(__name__)

#PREFERED_BROWSER = 'chrome'
#webbrowser._tryorder
# None lets the webbrowser module choose the system default browser.
# NOTE(review): 'PREFERED' is a misspelling of 'PREFERRED' but renaming
# would break external references; left as-is.
PREFERED_BROWSER = None
if ut.get_computer_name() == 'hyrule':
    PREFERED_BROWSER = 'firefox'

# FIXME add as controller config
#ALLOW_SYSTEM_TOMCAT = ut.get_argflag('--allow-system-tomcat')


def get_tomcat_startup_tmpdir():
    """Return the directory used as tomcat's startup tmpdir, if found."""
    # Candidate locations in priority order. (A CATALINA_TMPDIR environment
    # override used to be considered here as well.)
    candidate_dpaths = [
        ut.ensure_app_resource_dir('ibeis', 'tomcat', 'ibeis_startup_tmpdir'),
    ]
    return ut.search_candidate_paths(candidate_dpaths, verbose=True)

Example #11
0
def add_new_temp_contributor(ibs,
                             user_prompt=False,
                             offset=None,
                             autolocate=False):
    r"""
    Add a temporary contributor row derived from machine/user/db identity.

    Args:
        ibs: controller providing get_dbname/get_dbdir/add_contributors
        user_prompt (bool): if True, interactively ask for overrides
        offset (int or None): optional disambiguating component for the tag
        autolocate (bool): if True, attempt automatic geo-location

    Returns:
        int: contributor_rowid of the added contributor

    RESTful:
        Method: POST
        URL:    /api/contributor/new/temp/
    """
    name_first = ibs.get_dbname()
    name_last = ut.get_computer_name() + ':' + ut.get_user_name(
    ) + ':' + ibs.get_dbdir()
    print('[collect_transfer_data] Contributor default first name: %s' %
          (name_first, ))
    print('[collect_transfer_data] Contributor default last name:  %s' %
          (name_last, ))
    if user_prompt:
        # BUGFIX: pressing Enter previously replaced the default with '';
        # `or` keeps the advertised default on empty input
        name_first = input(
            '\n[collect_transfer_data] Change first name (Enter to use default): '
        ) or name_first
        name_last = input(
            '\n[collect_transfer_data] Change last name (Enter to use default): '
        ) or name_last

    if autolocate:
        success, location_city, location_state, location_country, location_zip = ut.geo_locate(
        )
    else:
        success = False

    if success:
        print(
            '\n[collect_transfer_data] Your location was determined automatically.'
        )
        print('[collect_transfer_data] Contributor default city: %s' %
              (location_city, ))
        print('[collect_transfer_data] Contributor default state: %s' %
              (location_state, ))
        # BUGFIX: zip and country values were swapped in these two messages
        print('[collect_transfer_data] Contributor default zip: %s' %
              (location_zip, ))
        print('[collect_transfer_data] Contributor default country: %s' %
              (location_country, ))
        if user_prompt:
            # BUGFIX: keep the auto-located defaults on empty input
            location_city = input(
                '\n[collect_transfer_data] Change default location city (Enter to use default): '
            ) or location_city
            location_state = input(
                '\n[collect_transfer_data] Change default location state (Enter to use default): '
            ) or location_state
            location_zip = input(
                '\n[collect_transfer_data] Change default location zip (Enter to use default): '
            ) or location_zip
            location_country = input(
                '\n[collect_transfer_data] Change default location country (Enter to use default): '
            ) or location_country
    else:
        if user_prompt:
            print('\n')
        print(
            '[collect_transfer_data] Your location could not be determined automatically.'
        )
        if user_prompt:
            location_city = input(
                '[collect_transfer_data] Enter your location city (Enter to skip): '
            )
            location_state = input(
                '[collect_transfer_data] Enter your location state (Enter to skip): '
            )
            location_zip = input(
                '[collect_transfer_data] Enter your location zip (Enter to skip): '
            )
            location_country = input(
                '[collect_transfer_data] Enter your location country (Enter to skip): '
            )
        else:
            location_city = ''
            location_state = ''
            location_zip = ''
            location_country = ''

    #tag = '::'.join([name_first, name_last, location_city, location_state, location_zip, location_country])
    tag_components = [
        name_first, name_last, location_city, location_state, location_zip,
        location_country
    ]
    if offset is not None:
        tag_components += [str(offset)]
    # Semicolons would break downstream tag parsing; escape them
    tag_components_clean = [
        comp.replace(';', '<semi>') for comp in tag_components
    ]
    tag = ','.join(tag_components_clean)
    contributor_rowid = ibs.add_contributors(
        [tag],
        name_first_list=[name_first],
        name_last_list=[name_last],
        loc_city_list=[location_city],
        loc_state_list=[location_state],
        loc_country_list=[location_country],
        loc_zip_list=[location_zip])[0]
    return contributor_rowid
Example #12
0
def prometheus_update(ibs, *args, **kwargs):
    """
    Best-effort refresh of this process's Prometheus metrics.

    Updates database info/counts, job-engine status tallies, per-endpoint
    runtime/turnaround/elapsed gauges, and process-alive gauges.  Every
    stage is wrapped in its own try/except so a failure in one metric never
    blocks the others; the whole body is throttled so the expensive work
    only runs every PROMETHEUS_LIMIT calls.

    Args:
        ibs: controller object
        *args, **kwargs: accepted and ignored (signal-handler friendly)
    """
    try:
        with ut.Timer(verbose=False) as timer:
            # Metrics are labeled by container name (or dbname outside docker)
            if ibs.containerized:
                container_name = const.CONTAINER_NAME
            else:
                container_name = ibs.dbname

            global PROMETHEUS_COUNTER

            PROMETHEUS_COUNTER = PROMETHEUS_COUNTER + 1  # NOQA
            # logger.info('PROMETHEUS LIMIT %d / %d' % (PROMETHEUS_COUNTER, PROMETHEUS_LIMIT, ))

            # Throttle: only do the expensive update every PROMETHEUS_LIMIT calls
            if PROMETHEUS_COUNTER >= PROMETHEUS_LIMIT:
                PROMETHEUS_COUNTER = 0

                # Static-ish identity info about this database/process
                try:
                    PROMETHEUS_DATA['info'].info(
                        {
                            'uuid': str(ibs.get_db_init_uuid()),
                            'dbname': ibs.dbname,
                            'hostname': ut.get_computer_name(),
                            'container': container_name,
                            'version': ibs.db.get_db_version(),
                            'containerized': str(int(ibs.containerized)),
                            'production': str(int(ibs.production)),
                        }
                    )
                except Exception:
                    pass

                # Table-size gauges (zeroed in production to avoid heavy scans)
                try:
                    if ibs.production:
                        num_imageset_rowids = 0
                        num_gids = 0
                        num_aids = 0
                        num_pids = 0
                        num_nids = 0
                        num_species = 0
                    else:
                        num_imageset_rowids = len(ibs._get_all_imageset_rowids())
                        num_gids = len(ibs._get_all_gids())
                        num_aids = len(ibs._get_all_aids())
                        num_pids = len(ibs._get_all_part_rowids())
                        num_nids = len(ibs._get_all_name_rowids())
                        num_species = len(ibs._get_all_species_rowids())

                    PROMETHEUS_DATA['imagesets'].labels(name=container_name).set(
                        num_imageset_rowids
                    )
                    PROMETHEUS_DATA['images'].labels(name=container_name).set(num_gids)
                    PROMETHEUS_DATA['annotations'].labels(name=container_name).set(
                        num_aids
                    )
                    PROMETHEUS_DATA['parts'].labels(name=container_name).set(num_pids)
                    PROMETHEUS_DATA['names'].labels(name=container_name).set(num_nids)
                    PROMETHEUS_DATA['species'].labels(name=container_name).set(
                        num_species
                    )
                except Exception:
                    pass

                # NOTE(review): if this fails, job_status_dict stays undefined
                # and the next try-block's NameError is swallowed by design
                try:
                    job_status_dict = ibs.get_job_status()['json_result']
                except Exception:
                    pass

                try:
                    job_uuid_list = list(job_status_dict.keys())
                    # All recognized job states; '_error' buckets the rest
                    status_dict_template = {
                        'received': 0,
                        'accepted': 0,
                        'queued': 0,
                        'working': 0,
                        'publishing': 0,
                        'completed': 0,
                        'exception': 0,
                        'suppressed': 0,
                        'corrupted': 0,
                        '_error': 0,
                    }
                    # '*' aggregates across endpoints; 'max' tracks the
                    # highest jobcounter seen per status
                    status_dict = {
                        '*': status_dict_template.copy(),
                        'max': status_dict_template.copy(),
                    }

                    endpoints = set([])
                    working_endpoint = None
                except Exception:
                    pass

                # Tally every known job into the per-endpoint status counts
                for job_uuid in job_uuid_list:
                    try:
                        job_status = job_status_dict[job_uuid]

                        status = job_status['status']
                        endpoint = job_status['endpoint']
                        jobcounter = job_status['jobcounter']

                        status = '%s' % (status,)
                        endpoint = '%s' % (endpoint,)

                        if status not in status_dict_template.keys():
                            status = '_error'

                        if endpoint not in status_dict:
                            status_dict[endpoint] = status_dict_template.copy()

                        endpoints.add(endpoint)
                    except Exception:
                        pass

                    # For the in-flight job, publish how long it has been running
                    try:
                        if status in ['working']:
                            from wbia.web.job_engine import (
                                calculate_timedelta,
                                _timestamp,
                            )

                            started = job_status['time_started']
                            now = _timestamp()
                            (
                                hours,
                                minutes,
                                seconds,
                                total_seconds,
                            ) = calculate_timedelta(started, now)
                            logger.info(
                                'ELAPSED (%s): %d seconds...' % (job_uuid, total_seconds)
                            )
                            PROMETHEUS_DATA['elapsed'].labels(
                                name=container_name, endpoint=endpoint
                            ).set(total_seconds)
                            PROMETHEUS_DATA['elapsed'].labels(
                                name=container_name, endpoint='*'
                            ).set(total_seconds)
                            working_endpoint = endpoint
                    except Exception:
                        pass

                    try:
                        if status not in status_dict_template:
                            logger.info('UNRECOGNIZED STATUS %r' % (status,))
                        status_dict[endpoint][status] += 1
                        status_dict['*'][status] += 1

                        current_max = status_dict['max'][status]
                        status_dict['max'][status] = max(current_max, jobcounter)

                        if job_uuid not in PROMETHUS_JOB_CACHE_DICT:
                            PROMETHUS_JOB_CACHE_DICT[job_uuid] = {}
                    except Exception:
                        pass

                    # Publish runtime once per job (cached so it isn't re-sent)
                    try:
                        runtime_sec = job_status.get('time_runtime_sec', None)
                        if (
                            runtime_sec is not None
                            and 'runtime' not in PROMETHUS_JOB_CACHE_DICT[job_uuid]
                        ):
                            PROMETHUS_JOB_CACHE_DICT[job_uuid]['runtime'] = runtime_sec
                            PROMETHEUS_DATA['runtime'].labels(
                                name=container_name, endpoint=endpoint
                            ).set(runtime_sec)
                            PROMETHEUS_DATA['runtime'].labels(
                                name=container_name, endpoint='*'
                            ).set(runtime_sec)
                    except Exception:
                        pass

                    # Publish turnaround once per job (cached likewise)
                    try:
                        turnaround_sec = job_status.get('time_turnaround_sec', None)
                        if (
                            turnaround_sec is not None
                            and 'turnaround' not in PROMETHUS_JOB_CACHE_DICT[job_uuid]
                        ):
                            PROMETHUS_JOB_CACHE_DICT[job_uuid][
                                'turnaround'
                            ] = turnaround_sec
                            PROMETHEUS_DATA['turnaround'].labels(
                                name=container_name, endpoint=endpoint
                            ).set(turnaround_sec)
                            PROMETHEUS_DATA['turnaround'].labels(
                                name=container_name, endpoint='*'
                            ).set(turnaround_sec)
                    except Exception:
                        pass

                # Zero the elapsed gauge for every endpoint not currently working
                try:
                    if working_endpoint is None:
                        PROMETHEUS_DATA['elapsed'].labels(
                            name=container_name, endpoint='*'
                        ).set(0.0)

                    for endpoint in endpoints:
                        if endpoint == working_endpoint:
                            continue
                        PROMETHEUS_DATA['elapsed'].labels(
                            name=container_name, endpoint=endpoint
                        ).set(0.0)
                except Exception:
                    pass

                # Publish the per-endpoint status tallies
                try:
                    # logger.info(ut.repr3(status_dict))
                    for endpoint in status_dict:
                        for status in status_dict[endpoint]:
                            number = status_dict[endpoint][status]
                            PROMETHEUS_DATA['engine'].labels(
                                status=status, name=container_name, endpoint=endpoint
                            ).set(number)
                except Exception:
                    pass

                # Publish process liveness (0 = alive, 1 = dead/missing)
                try:
                    # logger.info(ut.repr3(status_dict))
                    process_status_dict = ibs.get_process_alive_status()
                    for process in process_status_dict:
                        number = 0 if process_status_dict.get(process, False) else 1
                        PROMETHEUS_DATA['process'].labels(
                            process=process, name=container_name
                        ).set(number)
                except Exception:
                    pass
        # Record how long this whole update took
        try:
            PROMETHEUS_DATA['update'].labels(name=container_name).set(timer.ellapsed)
        except Exception:
            pass
    except Exception:
        pass
Example #13
0
# BUGFIX: `logging` was used below (logging.getLogger) but never imported,
# which raised NameError at import time.
import logging
import math
from collections import OrderedDict
from os.path import join

import numpy as np
import utool as ut

# Inject utool's print/reload/profile helpers into this module's namespace
(print, rrr, profile) = ut.inject2(__name__)
logger = logging.getLogger('wbia')

# Deployment flags toggled from the command line
CONTAINERIZED = ut.get_argflag('--containerized')
PRODUCTION = ut.get_argflag('--production')
HTTPS = ut.get_argflag('--https')

# Default container name is this machine's hostname
CONTAINER_NAME = ut.get_argval('--container-name',
                               type_=str,
                               default=ut.get_computer_name())
ENGINE_SLOT = ut.get_argval('--engine-slot', type_=str, default='default')

PI = math.pi
TAU = 2.0 * PI
VIEWTEXT_TO_YAW_RADIANS = OrderedDict([
    (
        'right',
        0.000 * TAU,
    ),
    (
        'frontright',
        0.125 * TAU,
    ),
    (
Example #14
0
def main(gui=True, dbdir=None, defaultdb='cache',
         allow_newdir=False, db=None,
         delete_ibsdir=False,
         **kwargs):
    """
    Program entry point
    Inits the system environment, an IBEISControl, and a GUI if requested

    Args:
        gui (bool): (default=True) If gui is False a gui instance will not be created
        dbdir (None): full directory of a database to load
        db (None): name of database to load relative to the workdir
        allow_newdir (bool): (default=False) if False an error is raised if a
            a new database is created
        defaultdb (str): codename of database to load if db and dbdir is None. a value
            of 'cache' will open the last database opened with the GUI.
        delete_ibsdir (bool): (default=False) if True the database directory
            is deleted first; requires allow_newdir
        **kwargs: forwarded to preload/postload commands and gui activation

    Returns:
        dict: main_locals
    """
    set_newfile_permissions()
    from ibeis.init import main_commands
    from ibeis.init import sysres
    # Display a visible intro message
    msg1 = '''
    _____ ....... _______ _____ _______
      |   |_____| |______   |   |______
    ..|.. |.....| |______s__|__ ______|
    '''
    msg2 = '''
    _____ ______  _______ _____ _______
      |   |_____] |______   |   |______
    __|__ |_____] |______ __|__ ______|
    '''
    if NOT_QUIET:
        print(msg2 if '--myway' not in sys.argv else msg1)
    # Init the only two main system api handles
    ibs = None
    back = None
    if NOT_QUIET:
        print('[main] ibeis.main_module.main()')
    _preload()
    DIAGNOSTICS = NOT_QUIET
    if DIAGNOSTICS:
        import os
        import utool as ut
        import ibeis
        # Environment details help diagnose bug reports
        print('[main] MAIN DIAGNOSTICS')
        print('[main]  * username = %r' % (ut.get_user_name()))
        print('[main]  * ibeis.__version__ = %r' % (ibeis.__version__,))
        print('[main]  * computername = %r' % (ut.get_computer_name()))
        print('[main]  * cwd = %r' % (os.getcwd(),))
        print('[main]  * sys.argv = %r' % (sys.argv,))
    # Parse directory to be loaded from command line args
    # and explicit kwargs
    dbdir = sysres.get_args_dbdir(defaultdb, allow_newdir, db, dbdir, cache_priority=False)
    if delete_ibsdir is True:
        from ibeis.other import ibsfuncs
        assert allow_newdir, 'must be making new directory if you are deleting everything!'
        ibsfuncs.delete_ibeis_database(dbdir)
    # Execute preload commands
    main_commands.preload_commands(dbdir, **kwargs)  # PRELOAD CMDS
    try:
        # Build IBEIS Control object
        ibs = _init_ibeis(dbdir)
        if gui and USE_GUI:
            back = _init_gui(activate=kwargs.get('activate', True))
            back.connect_ibeis_control(ibs)
    except Exception as ex:
        print('[main()] IBEIS LOAD imageseted exception: %s %s' % (type(ex), ex))
        raise
    main_commands.postload_commands(ibs, back)  # POSTLOAD CMDS
    main_locals = {'ibs': ibs, 'back': back}
    return main_locals
Example #15
0
def assert_modules():
    """
    Run every registered module check and report pass/fail with fixes.

    checkinfo functions return info_dict
    checkinfo_func

    CommandLine:
        python -m ibeis.tests.assert_modules --test-assert_modules

    Returns:
        str: detailed_msg combining machine info, per-module status,
            failures, existence checks and suggested fixes

    Example:
        >>> # DOCTEST_ENABLE
        >>> from ibeis.tests.assert_modules import *   # NOQA
        >>> detailed_msg = assert_modules()
        >>> print(detailed_msg)
    """

    MACHINE_NAME = ut.get_computer_name()

    # Header: environment details printed before any checks run
    machine_info_lines = []

    machine_info_lines.append('sys.version = %r ' % (sys.version))
    machine_info_lines.append('PATH = ' + ut.list_str(ut.get_path_dirs()))
    machine_info_lines.append('\n\n\n============================')
    machine_info_lines.append('Begining assert modules main')
    machine_info_lines.append('* MACHINE_NAME = %r' % MACHINE_NAME)
    machine_info_text = '\n'.join(machine_info_lines)
    print(machine_info_text)

    statustext_list = []
    failed_list = []
    fix_list = []

    # --nostatus / --nostat suppress the verbose per-check status lines
    SHOW_STATUS = not ut.get_argflag(('--nostatus', '--nostat'))

    # Each wrapper checks one module and reports version + pass/fail
    for checkinfo_wrapper in ASSERT_FUNCS:
        passed, current_version, target, infodict, statustext, suggested_fix = checkinfo_wrapper()
        funcname = get_funcname(checkinfo_wrapper)
        if SHOW_STATUS:
            statustext_list.append(statustext)
        if passed:
            statustext_list.append(funcname + ' ' + str(infodict['__version__']) + ' passed')
            #statustext_list.append('')
        else:
            failed_list.append(funcname + ' FAILED!!!')
            fix_list.append(suggested_fix)
            statustext_list.append(funcname + ' FAILED!!!')
        if SHOW_STATUS:
            statustext_list.append('')

    output_text = '\n'.join(statustext_list)

    failed_text = '\n'.join(failed_list)
    print(output_text)
    print(failed_text)
    check_exist_text = check_modules_exists()
    print(check_exist_text)
    fix_text = ''
    if len(fix_list) > 0:
        fix_text += ('suggested fixes:\n')
        fix_text += ('\n'.join(fix_list) + '\n')
        print(fix_text)

    # Everything printed above is also returned as one message
    detailed_msg = '\n'.join([
        machine_info_text,
        output_text,
        failed_text,
        check_exist_text,
        fix_text,
    ])

    return detailed_msg
Example #16
0
def main(
    gui=True,
    dbdir=None,
    defaultdb='cache',
    allow_newdir=False,
    db=None,
    delete_ibsdir=False,
    **kwargs,
):
    """
    Program entry point
    Inits the system environment, an IBEISControl, and a GUI if requested

    Args:
        gui (bool): (default=True) If gui is False a gui instance will not be created
        dbdir (None): full directory of a database to load
        db (None): name of database to load relative to the workdir
        allow_newdir (bool): (default=False) if False an error is raised if a
            a new database is created
        defaultdb (str): codename of database to load if db and dbdir is None. a value
            of 'cache' will open the last database opened with the GUI.
        delete_ibsdir (bool): (default=False) if True the database directory
            is deleted first; requires allow_newdir
        **kwargs: forwarded to preload/postload commands and gui activation

    Returns:
        dict: main_locals
    """
    _preload()
    set_newfile_permissions()
    from wbia.init import main_commands
    from wbia.init import sysres

    # Display a visible intro message
    msg = """
    _____ ______  _______ _____ _______
      |   |_____] |______   |   |______
    __|__ |_____] |______ __|__ ______|
    """
    if NOT_QUIET:
        logger.info(msg)
    # Init the only two main system api handles
    ibs = None
    back = None
    if NOT_QUIET:
        logger.info('[main] wbia.entry_points.main()')
    DIAGNOSTICS = NOT_QUIET
    if DIAGNOSTICS:
        import os
        import utool as ut
        import wbia

        # Environment details help diagnose bug reports
        logger.info('[main] MAIN DIAGNOSTICS')
        logger.info('[main]  * username = %r' % (ut.get_user_name()))
        logger.info('[main]  * wbia.__version__ = %r' % (wbia.__version__, ))
        logger.info('[main]  * computername = %r' % (ut.get_computer_name()))
        logger.info('[main]  * cwd = %r' % (os.getcwd(), ))
        logger.info('[main]  * sys.argv = %r' % (sys.argv, ))
    # Parse directory to be loaded from command line args
    # and explicit kwargs
    dbdir = sysres.get_args_dbdir(defaultdb=defaultdb,
                                  allow_newdir=allow_newdir,
                                  db=db,
                                  dbdir=dbdir)
    if delete_ibsdir is True:
        from wbia.other import ibsfuncs

        assert (
            allow_newdir
        ), 'must be making new directory if you are deleting everything!'
        ibsfuncs.delete_wbia_database(dbdir)

    # limit = sys.getrecursionlimit()
    # if limit == 1000:
    #    logger.info('Setting Recursion Limit to 3000')
    #    sys.setrecursionlimit(3000)
    # Execute preload commands
    main_commands.preload_commands(dbdir, **kwargs)  # PRELOAD CMDS
    try:
        # Build IBEIS Control object
        ibs = _init_wbia(dbdir)
        if gui and USE_GUI:
            back = _init_gui(activate=kwargs.get('activate', True))
            back.connect_wbia_control(ibs)
    except Exception as ex:
        logger.info('[main()] IBEIS LOAD encountered exception: %s %s' %
                    (type(ex), ex))
        raise
    main_commands.postload_commands(ibs, back)  # POSTLOAD CMDS
    main_locals = {'ibs': ibs, 'back': back}
    return main_locals
Example #17
0
import requests
from ibeis.control import controller_inject
from ibeis.control import wildbook_manager as wb_man  # NOQA
from ibeis.control.controller_inject import make_ibs_register_decorator
# Inject utool print/reload/profile helpers for this module.
# NOTE(review): `ut` is not imported in this visible span — presumably
# imported near the top of the file; verify.
print, rrr, profile = ut.inject2(__name__, '[manual_wildbook]')

CLASS_INJECT_KEY, register_ibs_method = make_ibs_register_decorator(__name__)

# Decorators that expose functions as Flask API endpoints / routes.
register_api   = controller_inject.get_ibeis_flask_api(__name__)
register_route = controller_inject.get_ibeis_flask_route(__name__)


#PREFERED_BROWSER = 'chrome'
#webbrowser._tryorder
# Browser used to open Wildbook pages; None lets the webbrowser module
# pick the system default.  Hardcoded override for the 'hyrule' machine.
PREFERED_BROWSER = None
if ut.get_computer_name() == 'hyrule':
    PREFERED_BROWSER = 'firefox'


@register_ibs_method
def get_wildbook_base_url(ibs, wb_target=None):
    """
    Return the base URL of the local Wildbook deployment.

    Args:
        ibs: IBEIS controller (only used to supply the default target name
            via ``ibs.const.WILDBOOK_TARGET``)
        wb_target (str): Wildbook deployment name; when None the
            controller's configured target is used

    Returns:
        str: url of the form ``http://127.0.0.1:8080/<wb_target>``
    """
    wb_target = ibs.const.WILDBOOK_TARGET if wb_target is None else wb_target
    # NOTE(review): host/port are hard-coded to a local tomcat deployment
    hostname = '127.0.0.1'
    wb_port = 8080
    wildbook_base_url = 'http://%s:%s/%s' % (hostname, wb_port, wb_target)
    return wildbook_base_url


@register_ibs_method
def assert_ia_available_for_wb(ibs, wb_target=None):
    # Test if we have a server alive
def get_data_list():
    r"""
    Collect the non-python data files and binaries that PyInstaller must
    bundle with the IBEIS installer.

    Relies on module-level globals assumed to be defined earlier in this
    file (root_dir, ibsbuild, LIB_EXT, PLATFORM, APPLE, LINUX, WIN32) —
    TODO confirm they are set during module init.

    Returns:
        tuple: ``(DATATUP_LIST, BINARYTUP_LIST, iconfile)`` where
            DATATUP_LIST is a list of ``(dst, src)`` data-file pairs,
            BINARYTUP_LIST is a list of PyInstaller BINARY 3-tuples, and
            iconfile is the relative path of the platform icon.

    CommandLine:
        python ~/code/ibeis/_installers/ibeis_pyinstaller_data_helper.py --test-get_data_list

    Example:
        >>> # ENABLE_DOCTEST
        >>> from ibeis_pyinstaller_data_helper import *  # NOQA
        >>> result = get_data_list()
        >>> DATATUP_LIST, BINARYTUP_LIST, iconfile = result
        >>> print('DATATUP_LIST = ' + ut.list_str(DATATUP_LIST))
        >>> print('BINARYTUP_LIST = ' + ut.list_str(BINARYTUP_LIST))
        >>> print(len(DATATUP_LIST))
        >>> print(len(BINARYTUP_LIST))
        >>> print(iconfile)

    """
    # Build data before running analysis for quick debugging
    DATATUP_LIST = []
    BINARYTUP_LIST = []

    #import pyhesaff
    #pyhesaff.HESAFF_CLIB.__LIB_FPATH__
    #import pyrf
    #pyrf.RF_CLIB.__LIB_FPATH__
    # Hesaff
    # NOTE(review): native libs are expected in sibling source checkouts
    # (../hesaff, ../pyrf, ../flann, ../vtool relative to root_dir)
    libhesaff_fname = 'libhesaff' + LIB_EXT
    libhesaff_src = realpath(join(root_dir, '..', 'hesaff', 'pyhesaff', libhesaff_fname))
    libhesaff_dst = join(ibsbuild, 'pyhesaff', 'lib', libhesaff_fname)
    DATATUP_LIST.append((libhesaff_dst, libhesaff_src))

    # PyRF
    libpyrf_fname = 'libpyrf' + LIB_EXT
    libpyrf_src = realpath(join(root_dir, '..', 'pyrf', 'pyrf', libpyrf_fname))
    libpyrf_dst = join(ibsbuild, 'pyrf', 'lib', libpyrf_fname)
    DATATUP_LIST.append((libpyrf_dst, libpyrf_src))

    # FLANN
    libflann_fname = 'libflann' + LIB_EXT
    #try:
    #    #import pyflann
    #    #pyflann.__file__
    #    #join(dirname(dirname(pyflann.__file__)), 'build')
    #except ImportError as ex:
    #    print('PYFLANN IS NOT IMPORTABLE')
    #    raise
    #if WIN32 or LINUX:
    # FLANN
    #libflann_src = join_SITE_PACKAGES('pyflann', 'lib', libflann_fname)
    #libflann_dst = join(ibsbuild, libflann_fname)
    #elif APPLE:
    #    # libflann_src = '/pyflann/lib/libflann.dylib'
    #    # libflann_dst = join(ibsbuild, libflann_fname)
    #    libflann_src = join_SITE_PACKAGES('pyflann', 'lib', libflann_fname)
    #    libflann_dst = join(ibsbuild, libflann_fname)
    # This path is when pyflann was built using setup.py develop
    libflann_src = realpath(join(root_dir, '..', 'flann', 'build', 'lib', libflann_fname))
    libflann_dst = join(ibsbuild, 'pyflann', 'lib', libflann_fname)
    DATATUP_LIST.append((libflann_dst, libflann_src))

    # VTool
    vtool_libs = ['libsver']
    for libname in vtool_libs:
        lib_fname = libname + LIB_EXT
        vtlib_src = realpath(join(root_dir, '..', 'vtool', 'vtool', lib_fname))
        vtlib_dst = join(ibsbuild, 'vtool', lib_fname)
        DATATUP_LIST.append((vtlib_dst, vtlib_src))

    # Standard system locations searched for shared libraries on Linux
    linux_lib_dpaths = [
        '/usr/lib/x86_64-linux-gnu',
        '/usr/lib',
        '/usr/local/lib'
    ]

    # OpenMP
    if APPLE:
        # BSDDB, Fix for the modules that PyInstaller needs and (for some reason)
        # are not being added by PyInstaller
        libbsddb_src = '/opt/local/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/lib-dynload/_bsddb.so'
        libbsddb_dst = join(ibsbuild, '_bsddb.so')
        DATATUP_LIST.append((libbsddb_dst, libbsddb_src))
        #libgomp_src = '/opt/local/lib/libgomp.dylib'
        libgomp_src = '/opt/local/lib/gcc48/libgomp.dylib'
        BINARYTUP_LIST.append(('libgomp.1.dylib', libgomp_src, 'BINARY'))

        # very hack
        libiomp_src = '/Users/bluemellophone/code/libomp_oss/exports/mac_32e/lib.thin/libiomp5.dylib'
        BINARYTUP_LIST.append(('libiomp5.dylib', libiomp_src, 'BINARY'))

    if LINUX:
        libgomp_src = ut.search_in_dirs('libgomp.so.1', linux_lib_dpaths)
        ut.assertpath(libgomp_src)
        BINARYTUP_LIST.append(('libgomp.so.1', libgomp_src, 'BINARY'))

    # MinGW
    if WIN32:
        mingw_root = r'C:\MinGW\bin'
        mingw_dlls = ['libgcc_s_dw2-1.dll', 'libstdc++-6.dll', 'libgomp-1.dll', 'pthreadGC2.dll']
        for lib_fname in mingw_dlls:
            lib_src = join(mingw_root, lib_fname)
            lib_dst = join(ibsbuild, lib_fname)
            DATATUP_LIST.append((lib_dst, lib_src))

    # We need to add these 4 opencv libraries because pyinstaller does not find them.
    #OPENCV_EXT = {'win32': '248.dll',
    #              'darwin': '.2.4.dylib',
    #              'linux2': '.so.2.4'}[PLATFORM]

    target_cv_version = '3.0.0'

    # Platform-specific shared-library suffix for the targeted OpenCV version
    OPENCV_EXT = {'win32': target_cv_version.replace('.', '') + '.dll',
                  'darwin': '.' + target_cv_version + '.dylib',
                  'linux2': '.so.' + target_cv_version}[PLATFORM]

    missing_cv_name_list = [
        'libopencv_videostab',
        'libopencv_superres',
        'libopencv_stitching',
        #'libopencv_gpu',
        'libopencv_core',
        'libopencv_highgui',
        'libopencv_imgproc',
    ]
    # Hack to find the appropriate opencv libs
    for name in missing_cv_name_list:
        fname = name + OPENCV_EXT
        src = ''
        dst = ''
        if APPLE:
            src = join('/opt/local/lib', fname)
        elif LINUX:
            #src = join('/usr/lib', fname)
            src, tried = ut.search_in_dirs(fname, linux_lib_dpaths, strict=True, return_tried=True)
        elif WIN32:
            # machine-specific OpenCV install location on 'Ooo'
            if ut.get_computer_name() == 'Ooo':
                src = join(r'C:/Program Files (x86)/OpenCV/x86/mingw/bin', fname)
            else:
                src = join(root_dir, '../opencv/build/bin', fname)
        dst = join(ibsbuild, fname)
        # ut.assertpath(src)
        DATATUP_LIST.append((dst, src))

    ##################################
    # QT Gui dependencies
    ##################################
    if APPLE:
        walk_path = '/opt/local/Library/Frameworks/QtGui.framework/Versions/4/Resources/qt_menu.nib'
        for root, dirs, files in os.walk(walk_path):
            for lib_fname in files:
                toc_src = join(walk_path, lib_fname)
                toc_dst = join('qt_menu.nib', lib_fname)
                DATATUP_LIST.append((toc_dst, toc_src))

    ##################################
    # Documentation, Icons, and Web Assets
    ##################################
    # Documentation
    #userguide_dst = join('.', '_docs', 'IBEISUserGuide.pdf')
    #userguide_src = join(root_dir, '_docs', 'IBEISUserGuide.pdf')
    #DATATUP_LIST.append((userguide_dst, userguide_src))

    # Icon File
    ICON_EXT = {'darwin': '.icns',
                'win32':  '.ico',
                'linux2': '.ico'}[PLATFORM]
    iconfile = join('_installers', 'ibsicon' + ICON_EXT)
    icon_src = join(root_dir, iconfile)
    icon_dst = join(ibsbuild, iconfile)
    DATATUP_LIST.append((icon_dst, icon_src))

    # Fail fast (before the slow web-asset glob) if any source is missing
    print('[installer] Checking Data (preweb)')
    try:
        for (dst, src) in DATATUP_LIST:
            assert ut.checkpath(src, verbose=True), 'checkpath for src=%r failed' % (src,)
    except Exception as ex:
        ut.printex(ex, 'Checking data failed DATATUP_LIST=' + ut.list_str(DATATUP_LIST))
        raise

    # Web Assets
    INSTALL_WEB = True and not ut.get_argflag('--noweb')
    if INSTALL_WEB:
        web_root = join('ibeis', 'web/')
        #walk_path = join(web_root, 'static')
        #static_data = []
        #for root, dirs, files in os.walk(walk_path):
        #    root2 = root.replace(web_root, '')
        #    for icon_fname in files:
        #        if '.DS_Store' not in icon_fname:
        #            toc_src = join(abspath(root), icon_fname)
        #            toc_dst = join(root2, icon_fname)
        #            static_data.append((toc_dst, toc_src))
        #ut.get_list_column(static_data, 1) == ut.glob(walk_path, '*', recursive=True, with_dirs=False, exclude_dirs=['.DS_Store'])
        static_src_list = ut.glob(join(web_root, 'static'), '*', recursive=True, with_dirs=False, exclude_dirs=['.DS_Store'])
        static_dst_list = [relpath(src, join(root_dir, 'ibeis')) for src in static_src_list]
        static_data = zip(static_dst_list, static_src_list)
        DATATUP_LIST.extend(static_data)

        #walk_path = join(web_root, 'templates')
        #template_data = []
        #for root, dirs, files in os.walk(walk_path):
        #    root2 = root.replace(web_root, '')
        #    for icon_fname in files:
        #        if '.DS_Store' not in icon_fname:
        #            toc_src = join(abspath(root), icon_fname)
        #            toc_dst = join(root2, icon_fname)
        #            template_data.append((toc_dst, toc_src))
        template_src_list = ut.glob(join(web_root, 'templates'), '*', recursive=True, with_dirs=False, exclude_dirs=['.DS_Store'])
        template_dst_list = [relpath(src, join(root_dir, 'ibeis')) for src in template_src_list]
        template_data = zip(template_dst_list, template_src_list)
        DATATUP_LIST.extend(template_data)

    # Re-check everything (quietly) now that web assets are included
    print('[installer] Checking Data (postweb)')
    try:
        for (dst, src) in DATATUP_LIST:
            assert ut.checkpath(src, verbose=False), 'checkpath for src=%r failed' % (src,)
    except Exception as ex:
        ut.printex(ex, 'Checking data failed DATATUP_LIST=' + ut.list_str(DATATUP_LIST))
        raise

    return DATATUP_LIST, BINARYTUP_LIST, iconfile
Example #19
0
                            UNGROUPED_IMAGES_ENCTEXT]
# Label used for a freshly-created encounter grouping
NEW_ENCOUNTER_ENCTEXT = 'NEW ENCOUNTER'

#IMAGE_THUMB_SUFFIX = '_thumb.png'
#CHIP_THUMB_SUFFIX  = '_chip_thumb.png'
# Thumbnail filename suffixes (jpg; the older png variants are kept above
# as commented-out history)
IMAGE_THUMB_SUFFIX = '_thumb.jpg'
IMAGE_BARE_THUMB_SUFFIX = '_thumb_bare.jpg'
CHIP_THUMB_SUFFIX  = '_chip_thumb.jpg'


# Keys naming query modes: against exemplars vs. within one encounter
VS_EXEMPLARS_KEY = 'vs_exemplars'
INTRA_ENC_KEY = 'intra_encounter'

# Note tag that marks manually-flagged hard cases
HARD_NOTE_TAG = '<HARDCASE>'

# Pick the Wildbook deployment default by host: the production server
# (ibeis.cs.uic.edu) defaults to 'prod'; every other machine uses 'ibeis'.
# Computing the default first avoids duplicating the get_argval call in
# both branches (matches the pattern used elsewhere in the codebase).
if ut.get_computer_name() == 'ibeis.cs.uic.edu':
    _DEFAULT_WILDBOOK_TARGET = 'prod'
else:
    _DEFAULT_WILDBOOK_TARGET = 'ibeis'
WILDBOOK_TARGET = ut.get_argval('--wildbook-target', type_=str,
                                default=_DEFAULT_WILDBOOK_TARGET,
                                help_='specify the Wildbook target deployment')


class ZIPPED_URLS(object):
    """Download URLs for zipped test databases and distinctiveness models."""
    PZ_MTEST       = 'https://www.dropbox.com/s/xdae2yvsp57l4t2/PZ_MTEST.zip'
    NAUTS          = 'https://www.dropbox.com/s/8gt3eaiw8rb31rh/NAUT_test.zip'
    PZ_DISTINCTIVE = 'https://www.dropbox.com/s/gbp24qks9z3fzm6/distinctivness_zebra_plains.zip'
    GZ_DISTINCTIVE = 'https://www.dropbox.com/s/nb5gv7wibwo3ib4/distinctivness_zebra_grevys.zip'

# Python 2 compatibility: alias the text type so code can refer to __STR__
# uniformly (unicode on py2; on py3 str would be used instead).
if six.PY2:
    __STR__ = unicode  # change to str if needed
Example #20
0
def demo2():
    """
    Run a small end-to-end demo of the graph-identification review loop on
    synthetic data, optionally rendering each review step to PNG figures in
    ~/Desktop/demo (with --viz).

    CommandLine:
        python -m wbia.algo.graph.demo demo2 --viz
        python -m wbia.algo.graph.demo demo2

    Example:
        >>> # DISABLE_DOCTEST
        >>> from wbia.algo.graph.demo import *  # NOQA
        >>> result = demo2()
        >>> print(result)
    """
    import wbia.plottool as pt

    from wbia.scripts.thesis import TMP_RC
    import matplotlib as mpl

    mpl.rcParams.update(TMP_RC)

    # ---- Synthetic data params
    params = {
        'redun.pos': 2,
        'redun.neg': 2,
    }
    # oracle_accuracy = .98
    # oracle_accuracy = .90
    # oracle_accuracy = (.8, 1.0)
    oracle_accuracy = (0.85, 1.0)
    # oracle_accuracy = 1.0

    # --- draw params

    # Command-line knobs: --viz enables figure output; --target/--start/--end
    # select which review iterations are drawn / where the loop stops.
    VISUALIZE = ut.get_argflag('--viz')
    # QUIT_OR_EMEBED = 'embed'
    QUIT_OR_EMEBED = 'quit'
    TARGET_REVIEW = ut.get_argval('--target', type_=int, default=None)
    START = ut.get_argval('--start', type_=int, default=None)
    END = ut.get_argval('--end', type_=int, default=None)

    # ------------------

    # rng = np.random.RandomState(42)

    # infr = demodata_infr(num_pccs=4, size=3, size_std=1, p_incon=0)
    # infr = demodata_infr(num_pccs=6, size=7, size_std=1, p_incon=0)
    # infr = demodata_infr(num_pccs=3, size=5, size_std=.2, p_incon=0)
    infr = demodata_infr(pcc_sizes=[5, 2, 4])
    infr.verbose = 100
    # apply_dummy_viewpoints(infr)
    # infr.ensure_cliques()
    infr.ensure_cliques()
    infr.ensure_full()
    # infr.apply_edge_truth()
    # Dummy scoring

    infr.init_simulation(oracle_accuracy=oracle_accuracy, name='demo2')

    # infr_gt = infr.copy()

    # Fresh output directory for the numbered figure sequence
    dpath = ut.ensuredir(ut.truepath('~/Desktop/demo'))
    ut.remove_files_in_dir(dpath)

    fig_counter = it.count(0)

    def show_graph(infr, title, final=False, selected_edges=None):
        # Render the current inference graph state and save it as the next
        # numbered demo_XXXX.png; no-op unless --viz was given.
        if not VISUALIZE:
            return
        # TODO: rich colored text?
        latest = '\n'.join(infr.latest_logs())
        showkw = dict(
            # fontsize=infr.graph.graph['fontsize'],
            # fontname=infr.graph.graph['fontname'],
            show_unreviewed_edges=True,
            show_inferred_same=False,
            show_inferred_diff=False,
            outof=(len(infr.aids)),
            # show_inferred_same=True,
            # show_inferred_diff=True,
            selected_edges=selected_edges,
            show_labels=True,
            simple_labels=True,
            # show_recent_review=not final,
            show_recent_review=False,
            # splines=infr.graph.graph['splines'],
            reposition=False,
            # with_colorbar=True
        )
        # Temporarily silence verbosity while drawing
        verbose = infr.verbose
        infr.verbose = 0
        # NOTE(review): the copy below is immediately discarded by the
        # rebind on the next line — likely leftover debugging; confirm.
        infr_ = infr.copy()
        infr_ = infr
        infr_.verbose = verbose
        infr_.show(pickable=True, verbose=0, **showkw)
        infr.verbose = verbose
        # logger.info('status ' + ut.repr4(infr_.status()))
        # infr.show(**showkw)
        ax = pt.gca()
        pt.set_title(title, fontsize=20)
        fig = pt.gcf()
        fontsize = 22
        if True:
            # postprocess xlabel
            lines = []
            for line in latest.split('\n'):
                if False and line.startswith('ORACLE ERROR'):
                    lines += ['ORACLE ERROR']
                else:
                    lines += [line]
            latest = '\n'.join(lines)
            # Shrink the log text so longer logs still fit under the axes
            if len(lines) > 10:
                fontsize = 16
            if len(lines) > 12:
                fontsize = 14
            if len(lines) > 14:
                fontsize = 12
            if len(lines) > 18:
                fontsize = 10

            if len(lines) > 23:
                fontsize = 8

        if True:
            # Reserve the bottom 45% of the figure for the log text
            pt.adjust_subplots(top=0.95, left=0, right=1, bottom=0.45, fig=fig)
            ax.set_xlabel('\n' + latest)
            xlabel = ax.get_xaxis().get_label()
            xlabel.set_horizontalalignment('left')
            # xlabel.set_x(.025)
            xlabel.set_x(-0.6)
            # xlabel.set_fontname('CMU Typewriter Text')
            xlabel.set_fontname('Inconsolata')
            xlabel.set_fontsize(fontsize)
        ax.set_aspect('equal')

        # ax.xaxis.label.set_color('red')

        from os.path import join

        fpath = join(dpath, 'demo_{:04d}.png'.format(next(fig_counter)))
        fig.savefig(
            fpath,
            dpi=300,
            # transparent=True,
            edgecolor='none',
        )

        # pt.save_figure(dpath=dpath, dpi=300)
        infr.latest_logs()

    if VISUALIZE:
        infr.update_visual_attrs(groupby='name_label')
        infr.set_node_attrs('pin', 'true')
        node_dict = ut.nx_node_dict(infr.graph)
        logger.info(ut.repr4(node_dict[1]))

    if VISUALIZE:
        infr.latest_logs()
        # Pin Nodes into the target groundtruth position
        show_graph(infr, 'target-gt')

    # Wipe feedback/labels/edges so the simulation starts from scratch
    logger.info(ut.repr4(infr.status()))
    infr.clear_feedback()
    infr.clear_name_labels()
    infr.clear_edges()
    logger.info(ut.repr4(infr.status()))
    infr.latest_logs()

    if VISUALIZE:
        infr.update_visual_attrs()

    infr.prioritize('prob_match')
    if VISUALIZE or TARGET_REVIEW is None or TARGET_REVIEW == 0:
        show_graph(infr, 'initial state')

    def on_new_candidate_edges(infr, edges):
        # hack updateing visual attrs as a callback
        infr.update_visual_attrs()

    infr.on_new_candidate_edges = on_new_candidate_edges

    infr.params.update(**params)
    infr.refresh_candidate_edges()

    VIZ_ALL = VISUALIZE and TARGET_REVIEW is None and START is None
    logger.info('VIZ_ALL = %r' % (VIZ_ALL, ))

    if VIZ_ALL or TARGET_REVIEW == 0:
        show_graph(infr, 'find-candidates')

    # _iter2 = enumerate(infr.generate_reviews(**params))
    # _iter2 = list(_iter2)
    # assert len(_iter2) > 0

    # prog = ut.ProgIter(_iter2, label='demo2', bs=False, adjust=False,
    #                    enabled=False)
    # Main simulated-review loop: pop prioritized edges, ask the oracle,
    # feed the decision back into the inference graph.
    count = 1
    first = 1
    for edge, priority in infr._generate_reviews(data=True):
        msg = 'review #%d, priority=%.3f' % (count, priority)
        logger.info('\n----------')
        infr.print('pop edge {} with priority={:.3f}'.format(edge, priority))
        # logger.info('remaining_reviews = %r' % (infr.remaining_reviews()),)
        # Make the next review

        if START is not None:
            VIZ_ALL = count >= START

        if END is not None and count >= END:
            break

        infr.print(msg)
        if ut.allsame(infr.pos_graph.node_labels(*edge)) and first:
            # Have oracle make a mistake early
            feedback = infr.request_oracle_review(edge, accuracy=0)
            first -= 1
        else:
            feedback = infr.request_oracle_review(edge)

        AT_TARGET = TARGET_REVIEW is not None and count >= TARGET_REVIEW - 1

        SHOW_CANDIATE_POP = True
        if SHOW_CANDIATE_POP and (VIZ_ALL or AT_TARGET):
            # import utool
            # utool.embed()
            infr.print(
                ut.repr2(infr.task_probs['match_state'][edge],
                         precision=4,
                         si=True))
            infr.print('len(queue) = %r' % (len(infr.queue)))
            # Show edge selection
            infr.print('Oracle will predict: ' + feedback['evidence_decision'])
            show_graph(infr, 'pre' + msg, selected_edges=[edge])

        if count == TARGET_REVIEW:
            infr.EMBEDME = QUIT_OR_EMEBED == 'embed'
        infr.add_feedback(edge, **feedback)
        infr.print('len(queue) = %r' % (len(infr.queue)))
        # infr.apply_nondynamic_update()
        # Show the result
        if VIZ_ALL or AT_TARGET:
            show_graph(infr, msg)
            # import sys
            # sys.exit(1)
        if count == TARGET_REVIEW:
            break
        count += 1

    infr.print('status = ' + ut.repr4(infr.status(extended=False)))
    show_graph(infr, 'post-review (#reviews={})'.format(count), final=True)

    # ROUND 2 FIGHT
    # if TARGET_REVIEW is None and round2_params is not None:
    #     # HACK TO GET NEW THINGS IN QUEUE
    #     infr.params = round2_params

    #     _iter2 = enumerate(infr.generate_reviews(**params))
    #     prog = ut.ProgIter(_iter2, label='round2', bs=False, adjust=False,
    #                        enabled=False)
    #     for count, (aid1, aid2) in prog:
    #         msg = 'reviewII #%d' % (count)
    #         logger.info('\n----------')
    #         logger.info(msg)
    #         logger.info('remaining_reviews = %r' % (infr.remaining_reviews()),)
    #         # Make the next review evidence_decision
    #         feedback = infr.request_oracle_review(edge)
    #         if count == TARGET_REVIEW:
    #             infr.EMBEDME = QUIT_OR_EMEBED == 'embed'
    #         infr.add_feedback(edge, **feedback)
    #         # Show the result
    #         if PRESHOW or TARGET_REVIEW is None or count >= TARGET_REVIEW - 1:
    #             show_graph(infr, msg)
    #         if count == TARGET_REVIEW:
    #             break

    #     show_graph(infr, 'post-re-review', final=True)

    if not getattr(infr, 'EMBEDME', False):
        # Tile figures onto half the screen on the dev machines
        if ut.get_computer_name().lower() in ['hyrule', 'ooo']:
            pt.all_figures_tile(monitor_num=0, percent_w=0.5)
        else:
            pt.all_figures_tile()
        ut.show_if_requested()
def get_data_list():
    r"""
    Collect the non-python data files and binaries that PyInstaller must
    bundle with the IBEIS installer.

    Relies on module-level globals assumed to be defined earlier in this
    file (root_dir, ibsbuild, LIB_EXT, PLATFORM, APPLE, LINUX, WIN32) —
    TODO confirm they are set during module init.

    Returns:
        tuple: ``(DATATUP_LIST, BINARYTUP_LIST, iconfile)`` where
            DATATUP_LIST is a list of ``(dst, src)`` data-file pairs,
            BINARYTUP_LIST is a list of PyInstaller BINARY 3-tuples, and
            iconfile is the relative path of the platform icon.

    CommandLine:
        python ~/code/ibeis/_installers/ibeis_pyinstaller_data_helper.py --test-get_data_list

    Example:
        >>> # ENABLE_DOCTEST
        >>> from ibeis_pyinstaller_data_helper import *  # NOQA
        >>> result = get_data_list()
        >>> DATATUP_LIST, BINARYTUP_LIST, iconfile = result
        >>> print('DATATUP_LIST = ' + ut.list_str(DATATUP_LIST))
        >>> print('BINARYTUP_LIST = ' + ut.list_str(BINARYTUP_LIST))
        >>> print(len(DATATUP_LIST))
        >>> print(len(BINARYTUP_LIST))
        >>> print(iconfile)

    """
    # Build data before running analysis for quick debugging
    DATATUP_LIST = []
    BINARYTUP_LIST = []

    #import pyhesaff
    #pyhesaff.HESAFF_CLIB.__LIB_FPATH__
    #import pyrf
    #pyrf.RF_CLIB.__LIB_FPATH__
    # Hesaff
    # NOTE(review): native libs are expected in sibling source checkouts
    # (../hesaff, ../pyrf, ../flann, ../vtool relative to root_dir)
    libhesaff_fname = 'libhesaff' + LIB_EXT
    libhesaff_src = realpath(
        join(root_dir, '..', 'hesaff', 'pyhesaff', libhesaff_fname))
    libhesaff_dst = join(ibsbuild, 'pyhesaff', 'lib', libhesaff_fname)
    DATATUP_LIST.append((libhesaff_dst, libhesaff_src))

    # PyRF
    libpyrf_fname = 'libpyrf' + LIB_EXT
    libpyrf_src = realpath(join(root_dir, '..', 'pyrf', 'pyrf', libpyrf_fname))
    libpyrf_dst = join(ibsbuild, 'pyrf', 'lib', libpyrf_fname)
    DATATUP_LIST.append((libpyrf_dst, libpyrf_src))

    # FLANN
    libflann_fname = 'libflann' + LIB_EXT
    #try:
    #    #import pyflann
    #    #pyflann.__file__
    #    #join(dirname(dirname(pyflann.__file__)), 'build')
    #except ImportError as ex:
    #    print('PYFLANN IS NOT IMPORTABLE')
    #    raise
    #if WIN32 or LINUX:
    # FLANN
    #libflann_src = join_SITE_PACKAGES('pyflann', 'lib', libflann_fname)
    #libflann_dst = join(ibsbuild, libflann_fname)
    #elif APPLE:
    #    # libflann_src = '/pyflann/lib/libflann.dylib'
    #    # libflann_dst = join(ibsbuild, libflann_fname)
    #    libflann_src = join_SITE_PACKAGES('pyflann', 'lib', libflann_fname)
    #    libflann_dst = join(ibsbuild, libflann_fname)
    # This path is when pyflann was built using setup.py develop
    libflann_src = realpath(
        join(root_dir, '..', 'flann', 'build', 'lib', libflann_fname))
    libflann_dst = join(ibsbuild, 'pyflann', 'lib', libflann_fname)
    DATATUP_LIST.append((libflann_dst, libflann_src))

    # VTool
    vtool_libs = ['libsver']
    for libname in vtool_libs:
        lib_fname = libname + LIB_EXT
        vtlib_src = realpath(join(root_dir, '..', 'vtool', 'vtool', lib_fname))
        vtlib_dst = join(ibsbuild, 'vtool', lib_fname)
        DATATUP_LIST.append((vtlib_dst, vtlib_src))

    # Standard system locations searched for shared libraries on Linux
    linux_lib_dpaths = [
        '/usr/lib/x86_64-linux-gnu', '/usr/lib', '/usr/local/lib'
    ]

    # OpenMP
    if APPLE:
        # BSDDB, Fix for the modules that PyInstaller needs and (for some reason)
        # are not being added by PyInstaller
        libbsddb_src = '/opt/local/Library/Frameworks/Python.framework/Versions/2.7/lib/python2.7/lib-dynload/_bsddb.so'
        libbsddb_dst = join(ibsbuild, '_bsddb.so')
        DATATUP_LIST.append((libbsddb_dst, libbsddb_src))
        #libgomp_src = '/opt/local/lib/libgomp.dylib'
        libgomp_src = '/opt/local/lib/gcc48/libgomp.dylib'
        BINARYTUP_LIST.append(('libgomp.1.dylib', libgomp_src, 'BINARY'))

        # very hack
        libiomp_src = '/Users/bluemellophone/code/libomp_oss/exports/mac_32e/lib.thin/libiomp5.dylib'
        BINARYTUP_LIST.append(('libiomp5.dylib', libiomp_src, 'BINARY'))

    if LINUX:
        libgomp_src = ut.search_in_dirs('libgomp.so.1', linux_lib_dpaths)
        ut.assertpath(libgomp_src)
        BINARYTUP_LIST.append(('libgomp.so.1', libgomp_src, 'BINARY'))

    # MinGW
    if WIN32:
        mingw_root = r'C:\MinGW\bin'
        mingw_dlls = [
            'libgcc_s_dw2-1.dll', 'libstdc++-6.dll', 'libgomp-1.dll',
            'pthreadGC2.dll'
        ]
        for lib_fname in mingw_dlls:
            lib_src = join(mingw_root, lib_fname)
            lib_dst = join(ibsbuild, lib_fname)
            DATATUP_LIST.append((lib_dst, lib_src))

    # We need to add these 4 opencv libraries because pyinstaller does not find them.
    #OPENCV_EXT = {'win32': '248.dll',
    #              'darwin': '.2.4.dylib',
    #              'linux2': '.so.2.4'}[PLATFORM]

    target_cv_version = '3.0.0'

    # Platform-specific shared-library suffix for the targeted OpenCV version
    OPENCV_EXT = {
        'win32': target_cv_version.replace('.', '') + '.dll',
        'darwin': '.' + target_cv_version + '.dylib',
        'linux2': '.so.' + target_cv_version
    }[PLATFORM]

    missing_cv_name_list = [
        'libopencv_videostab',
        'libopencv_superres',
        'libopencv_stitching',
        #'libopencv_gpu',
        'libopencv_core',
        'libopencv_highgui',
        'libopencv_imgproc',
    ]
    # Hack to find the appropriate opencv libs
    for name in missing_cv_name_list:
        fname = name + OPENCV_EXT
        src = ''
        dst = ''
        if APPLE:
            src = join('/opt/local/lib', fname)
        elif LINUX:
            #src = join('/usr/lib', fname)
            src, tried = ut.search_in_dirs(fname,
                                           linux_lib_dpaths,
                                           strict=True,
                                           return_tried=True)
        elif WIN32:
            # machine-specific OpenCV install location on 'Ooo'
            if ut.get_computer_name() == 'Ooo':
                src = join(r'C:/Program Files (x86)/OpenCV/x86/mingw/bin',
                           fname)
            else:
                src = join(root_dir, '../opencv/build/bin', fname)
        dst = join(ibsbuild, fname)
        # ut.assertpath(src)
        DATATUP_LIST.append((dst, src))

    ##################################
    # QT Gui dependencies
    ##################################
    if APPLE:
        walk_path = '/opt/local/Library/Frameworks/QtGui.framework/Versions/4/Resources/qt_menu.nib'
        for root, dirs, files in os.walk(walk_path):
            for lib_fname in files:
                toc_src = join(walk_path, lib_fname)
                toc_dst = join('qt_menu.nib', lib_fname)
                DATATUP_LIST.append((toc_dst, toc_src))

    ##################################
    # Documentation, Icons, and Web Assets
    ##################################
    # Documentation
    #userguide_dst = join('.', '_docs', 'IBEISUserGuide.pdf')
    #userguide_src = join(root_dir, '_docs', 'IBEISUserGuide.pdf')
    #DATATUP_LIST.append((userguide_dst, userguide_src))

    # Icon File
    ICON_EXT = {'darwin': '.icns', 'win32': '.ico', 'linux2': '.ico'}[PLATFORM]
    iconfile = join('_installers', 'ibsicon' + ICON_EXT)
    icon_src = join(root_dir, iconfile)
    icon_dst = join(ibsbuild, iconfile)
    DATATUP_LIST.append((icon_dst, icon_src))

    # Fail fast (before the slow web-asset glob) if any source is missing
    print('[installer] Checking Data (preweb)')
    try:
        for (dst, src) in DATATUP_LIST:
            assert ut.checkpath(
                src, verbose=True), 'checkpath for src=%r failed' % (src, )
    except Exception as ex:
        ut.printex(
            ex,
            'Checking data failed DATATUP_LIST=' + ut.list_str(DATATUP_LIST))
        raise

    # Web Assets
    INSTALL_WEB = True and not ut.get_argflag('--noweb')
    if INSTALL_WEB:
        web_root = join('ibeis', 'web/')
        #walk_path = join(web_root, 'static')
        #static_data = []
        #for root, dirs, files in os.walk(walk_path):
        #    root2 = root.replace(web_root, '')
        #    for icon_fname in files:
        #        if '.DS_Store' not in icon_fname:
        #            toc_src = join(abspath(root), icon_fname)
        #            toc_dst = join(root2, icon_fname)
        #            static_data.append((toc_dst, toc_src))
        #ut.get_list_column(static_data, 1) == ut.glob(walk_path, '*', recursive=True, with_dirs=False, exclude_dirs=['.DS_Store'])
        static_src_list = ut.glob(join(web_root, 'static'),
                                  '*',
                                  recursive=True,
                                  with_dirs=False,
                                  exclude_dirs=['.DS_Store'])
        static_dst_list = [
            relpath(src, join(root_dir, 'ibeis')) for src in static_src_list
        ]
        static_data = zip(static_dst_list, static_src_list)
        DATATUP_LIST.extend(static_data)

        #walk_path = join(web_root, 'templates')
        #template_data = []
        #for root, dirs, files in os.walk(walk_path):
        #    root2 = root.replace(web_root, '')
        #    for icon_fname in files:
        #        if '.DS_Store' not in icon_fname:
        #            toc_src = join(abspath(root), icon_fname)
        #            toc_dst = join(root2, icon_fname)
        #            template_data.append((toc_dst, toc_src))
        template_src_list = ut.glob(join(web_root, 'templates'),
                                    '*',
                                    recursive=True,
                                    with_dirs=False,
                                    exclude_dirs=['.DS_Store'])
        template_dst_list = [
            relpath(src, join(root_dir, 'ibeis')) for src in template_src_list
        ]
        template_data = zip(template_dst_list, template_src_list)
        DATATUP_LIST.extend(template_data)

    # Re-check everything (quietly) now that web assets are included
    print('[installer] Checking Data (postweb)')
    try:
        for (dst, src) in DATATUP_LIST:
            assert ut.checkpath(
                src, verbose=False), 'checkpath for src=%r failed' % (src, )
    except Exception as ex:
        ut.printex(
            ex,
            'Checking data failed DATATUP_LIST=' + ut.list_str(DATATUP_LIST))
        raise

    return DATATUP_LIST, BINARYTUP_LIST, iconfile
Example #22
0
def main(gui=True,
         dbdir=None,
         defaultdb='cache',
         allow_newdir=False,
         db=None,
         delete_ibsdir=False,
         **kwargs):
    """
    Program entry point
    Inits the system environment, an IBEISControl, and a GUI if requested

    Args:
        gui (bool): (default=True) If gui is False a gui instance will not be created
        dbdir (None): full directory of a database to load
        db (None): name of database to load relative to the workdir
        allow_newdir (bool): (default=False) if False an error is raised if a
            a new database is created
        defaultdb (str): codename of database to load if db and dbdir is None. a value
            of 'cache' will open the last database opened with the GUI.
        delete_ibsdir (bool): (default=False) if True the database directory
            is deleted before loading; requires allow_newdir=True
        **kwargs: forwarded to preload_commands and _init_gui
            (e.g. 'activate')

    Returns:
        dict: main_locals - {'ibs': controller, 'back': gui backend or None}
    """
    set_newfile_permissions()
    from ibeis.init import main_commands
    from ibeis.init import sysres
    # Display a visible intro message
    msg1 = '''
    _____ ....... _______ _____ _______
      |   |_____| |______   |   |______
    ..|.. |.....| |______s__|__ ______|
    '''
    msg2 = '''
    _____ ______  _______ _____ _______
      |   |_____] |______   |   |______
    __|__ |_____] |______ __|__ ______|
    '''
    if NOT_QUIET:
        print(msg2 if '--myway' not in sys.argv else msg1)
    # Init the only two main system api handles
    ibs = None
    back = None
    if NOT_QUIET:
        print('[main] ibeis.main_module.main()')
    _preload()
    DIAGNOSTICS = NOT_QUIET
    if DIAGNOSTICS:
        import os
        import utool as ut
        import ibeis
        print('[main] MAIN DIAGNOSTICS')
        print('[main]  * username = %r' % (ut.get_user_name()))
        print('[main]  * ibeis.__version__ = %r' % (ibeis.__version__, ))
        print('[main]  * computername = %r' % (ut.get_computer_name()))
        print('[main]  * cwd = %r' % (os.getcwd(), ))
        print('[main]  * sys.argv = %r' % (sys.argv, ))
    # Parse directory to be loaded from command line args
    # and explicit kwargs
    dbdir = sysres.get_args_dbdir(defaultdb,
                                  allow_newdir,
                                  db,
                                  dbdir,
                                  cache_priority=False)
    if delete_ibsdir is True:
        from ibeis.other import ibsfuncs
        assert allow_newdir, 'must be making new directory if you are deleting everything!'
        ibsfuncs.delete_ibeis_database(dbdir)
    # Execute preload commands
    main_commands.preload_commands(dbdir, **kwargs)  # PRELOAD CMDS
    try:
        # Build IBEIS Control object
        ibs = _init_ibeis(dbdir)
        if gui and USE_GUI:
            back = _init_gui(activate=kwargs.get('activate', True))
            back.connect_ibeis_control(ibs)
    except Exception as ex:
        # BUGFIX: message previously read 'imageseted exception' — a botched
        # encounter->imageset mass-rename of the word 'encountered'
        print('[main()] IBEIS LOAD encountered exception: %s %s' %
              (type(ex), ex))
        raise
    main_commands.postload_commands(ibs, back)  # POSTLOAD CMDS
    main_locals = {'ibs': ibs, 'back': back}
    return main_locals
Example #23
0
]
# Label used for a freshly-created imageset grouping
NEW_IMAGESET_IMAGESETTEXT = 'NEW IMAGESET'

#IMAGE_THUMB_SUFFIX = '_thumb.png'
#CHIP_THUMB_SUFFIX  = '_chip_thumb.png'
# Thumbnail filename suffixes (jpg; the older png variants are kept above
# as commented-out history)
IMAGE_THUMB_SUFFIX = '_thumb.jpg'
IMAGE_BARE_THUMB_SUFFIX = '_thumb_bare.jpg'
CHIP_THUMB_SUFFIX = '_chip_thumb.jpg'

# Keys naming query modes: against exemplars vs. within one occurrence
VS_EXEMPLARS_KEY = 'vs_exemplars'
INTRA_OCCUR_KEY = 'intra_occurrence'

# Note tag that marks manually-flagged hard cases
HARD_NOTE_TAG = '<HARDCASE>'

# HACK: pick the Wildbook deployment default by host — the production server
# (ibeis.cs.uic.edu) gets 'lewa2'; all other machines default to 'ibeis'.
if ut.get_computer_name() == 'ibeis.cs.uic.edu':
    #_DEFAULT_WILDBOOK_TARGET = 'prod'
    _DEFAULT_WILDBOOK_TARGET = 'lewa2'
else:
    _DEFAULT_WILDBOOK_TARGET = 'ibeis'
# Command line --wildbook-target overrides the host-based default
WILDBOOK_TARGET = ut.get_argval('--wildbook-target',
                                type_=str,
                                default=_DEFAULT_WILDBOOK_TARGET,
                                help_='specify the Wildbook target deployment')


class ZIPPED_URLS(object):
    PZ_MTEST = 'https://lev.cs.rpi.edu/public/databases/PZ_MTEST.zip'
    NAUTS = 'https://lev.cs.rpi.edu/public/databases/NAUT_test.zip'
    WDS = 'https://lev.cs.rpi.edu/public/databases/wd_peter2.zip'
    PZ_DISTINCTIVE = 'https://lev.cs.rpi.edu/public/models/distinctivness_zebra_plains.zip'