Example #1
0
 def _spawner(func, *args, **kwargs):
     # Closure: launch ``func`` in a background process, then give it
     # ``wait`` seconds (from the enclosing scope) to spin up before
     # asserting that it is still alive.
     if wait != 0:
         print('Waiting for background process (%s) to spin up'
               % (ut.get_funcname(func,)))
     process = ut.spawn_background_process(func, *args, **kwargs)
     time.sleep(wait)
     # NOTE: the assert message is only evaluated when the check fails.
     assert process.is_alive(), 'proc (%s) died too soon' % (ut.get_funcname(func,))
     return process
Example #2
0
def request_background_nnindexer(qreq_, daid_list):
    r"""Request that a flann nn-indexer be (re)built in a background process.

    FIXME: Duplicate code

    Args:
        qreq_ (QueryRequest):  query request object with hyper-parameters
        daid_list (list): database annotation rowids to index

    Returns:
        False if the request is denied because an indexer build is already
        in progress; otherwise None after the background process is spawned.

    CommandLine:
        python -m wbia.algo.hots.neighbor_index_cache --test-request_background_nnindexer

    Example:
        >>> # DISABLE_DOCTEST
        >>> from wbia.algo.hots.neighbor_index_cache import *  # NOQA
        >>> import wbia
        >>> # build test data
        >>> ibs = wbia.opendb('testdb1')
        >>> daid_list = ibs.get_valid_aids(species=wbia.const.TEST_SPECIES.ZEB_PLAIN)
        >>> qreq_ = ibs.new_query_request(daid_list, daid_list)
        >>> # execute function
        >>> request_background_nnindexer(qreq_, daid_list)
        >>> # verify results
        >>> result = str(False)
        >>> print(result)
    """
    global CURRENT_THREAD
    logger.info('Requesting background reindex')
    if not can_request_background_nnindexer():
        # Make sure this function doesn't run if it is already running
        logger.info('REQUEST DENIED')
        return False
    # BUG FIX: corrected misspelled log message (was 'REQUEST ACCPETED')
    logger.info('REQUEST ACCEPTED')
    daids_hashid = qreq_.ibs.get_annot_hashid_visual_uuid(daid_list)
    cfgstr = build_nnindex_cfgstr(qreq_, daid_list)
    cachedir = qreq_.ibs.get_flann_cachedir()
    # Save inverted cache uuid mappings (used when the background thread finishes)
    min_reindex_thresh = qreq_.qparams.min_reindex_thresh
    # Grab the keypoints names and image ids before query time?
    flann_params = qreq_.qparams.flann_params
    # Get annot descriptors to index
    vecs_list, fgws_list, fxs_list = get_support_data(qreq_, daid_list)
    # Don't hash rowids when given enough info in nnindex_cfgstr
    flann_params['cores'] = 2  # Only use a few cores in the background
    # Build/Load the flann index
    uuid_map_fpath = get_nnindexer_uuid_map_fpath(qreq_)
    visual_uuid_list = qreq_.ibs.get_annot_visual_uuids(daid_list)

    # Set temporary attribute so the finisher can update the uuid map cache
    # once the thread completes.
    finishtup = (uuid_map_fpath, daids_hashid, visual_uuid_list, min_reindex_thresh)
    CURRENT_THREAD = ut.spawn_background_process(
        background_flann_func,
        cachedir,
        daid_list,
        vecs_list,
        fgws_list,
        fxs_list,
        flann_params,
        cfgstr,
    )

    CURRENT_THREAD.finishtup = finishtup
Example #3
0
 def _spawner(func, *args, **kwargs):
     # Closure: spawn ``func`` in a background process.  ``wait`` comes
     # from the enclosing scope; the startup sleep is currently disabled.
     if wait != 0:
         spinup_msg = 'Waiting for background process (%s) to spin up'
         print(spinup_msg % (ut.get_funcname(func, )))
     bg_proc = ut.spawn_background_process(func, *args, **kwargs)
     # time.sleep(wait)
     died_msg = 'proc (%s) died too soon'
     assert bg_proc.is_alive(), died_msg % (ut.get_funcname(func, ))
     return bg_proc
Example #4
0
def request_background_nnindexer(qreq_, daid_list):
    r"""Request that a flann nn-indexer be (re)built in a background process.

    FIXME: Duplicate code

    Args:
        qreq_ (QueryRequest):  query request object with hyper-parameters
        daid_list (list): database annotation rowids to index

    Returns:
        False if the request is denied because an indexer build is already
        in progress; otherwise None after the background process is spawned.

    CommandLine:
        python -m ibeis.algo.hots.neighbor_index_cache --test-request_background_nnindexer

    Example:
        >>> # DISABLE_DOCTEST
        >>> from ibeis.algo.hots.neighbor_index_cache import *  # NOQA
        >>> import ibeis
        >>> # build test data
        >>> ibs = ibeis.opendb('testdb1')
        >>> daid_list = ibs.get_valid_aids(species=ibeis.const.TEST_SPECIES.ZEB_PLAIN)
        >>> qreq_ = ibs.new_query_request(daid_list, daid_list)
        >>> # execute function
        >>> request_background_nnindexer(qreq_, daid_list)
        >>> # verify results
        >>> result = str(False)
        >>> print(result)
    """
    global CURRENT_THREAD
    print('Requesting background reindex')
    if not can_request_background_nnindexer():
        # Make sure this function doesn't run if it is already running
        print('REQUEST DENIED')
        return False
    # BUG FIX: corrected misspelled message (was 'REQUEST ACCPETED')
    print('REQUEST ACCEPTED')
    daids_hashid = qreq_.ibs.get_annot_hashid_visual_uuid(daid_list)
    cfgstr = build_nnindex_cfgstr(qreq_, daid_list)
    cachedir = qreq_.ibs.get_flann_cachedir()
    # Save inverted cache uuid mappings (used when the background thread finishes)
    min_reindex_thresh = qreq_.qparams.min_reindex_thresh
    # Grab the keypoints names and image ids before query time?
    flann_params = qreq_.qparams.flann_params
    # Get annot descriptors to index
    vecs_list, fgws_list = get_support_data(qreq_, daid_list)
    # Don't hash rowids when given enough info in nnindex_cfgstr
    flann_params['cores'] = 2  # Only use a few cores in the background
    # Build/Load the flann index
    uuid_map_fpath = get_nnindexer_uuid_map_fpath(qreq_)
    visual_uuid_list = qreq_.ibs.get_annot_visual_uuids(daid_list)

    # Set temporary attribute so the finisher can update the uuid map cache
    # once the thread completes.
    finishtup = (uuid_map_fpath, daids_hashid, visual_uuid_list, min_reindex_thresh)
    CURRENT_THREAD = ut.spawn_background_process(
        background_flann_func, cachedir, daid_list, vecs_list, fgws_list,
        flann_params, cfgstr)

    CURRENT_THREAD.finishtup = finishtup
Example #5
0
def opendb_in_background(*args, **kwargs):
    """Start ``opendb`` as a background process and return the process.

    The keyword ``wait`` (seconds, default 0) is consumed here: it is
    announced before spawning and slept after spawning so the child has
    time to initialize.
    """
    import utool as ut
    import time
    wait_sec = kwargs.pop('wait', 0)
    if wait_sec != 0:
        print('waiting %s seconds for startup' % (wait_sec,))
    background_proc = ut.spawn_background_process(opendb, *args, **kwargs)
    if wait_sec != 0:
        time.sleep(wait_sec)  # wait for process to initialize
    return background_proc
Example #6
0
def opendb_in_background(*args, **kwargs):
    """Spawn ``opendb`` in a background process.

    Consumes an optional ``wait`` kwarg (seconds, default 0): when nonzero,
    a message is printed before spawning and the caller sleeps afterwards
    so the child process can come up.
    """
    import utool as ut
    import time
    startup_delay = kwargs.pop('wait', 0)
    if startup_delay != 0:
        print('waiting %s seconds for startup' % (startup_delay, ))
    proc = ut.spawn_background_process(opendb, *args, **kwargs)
    if startup_delay != 0:
        # Give the spawned process time to initialize before returning.
        time.sleep(startup_delay)
    return proc
Example #7
0
def opendb_in_background(*args, **kwargs):
    """
    Starts a web server in the background

    The ``wait`` keyword is deprecated: passing a nonzero value raises an
    AssertionError instead of sleeping.

    Raises:
        AssertionError: if a nonzero ``wait`` kwarg is supplied.
    """
    # Validate the deprecated kwarg before importing anything so callers
    # fail fast with a clear message.
    sec = kwargs.pop('wait', 0)
    if sec != 0:
        # BUG FIX: corrected spelling ('depricated' -> 'deprecated') and
        # removed the logging/sleep statements that were unreachable after
        # the raise; the unused ``time`` import went with them.
        raise AssertionError('wait is deprecated')
    import utool as ut

    proc = ut.spawn_background_process(opendb, *args, **kwargs)
    return proc
Example #8
0
def ensure_simple_server(port=5832):
    r"""Ensure a simple webserver is running on ``port``.

    CommandLine:
        python -m ibeis.web.zmq_task_queue --exec-ensure_simple_server
        python -m utool.util_web --exec-start_simple_webserver

    Example:
        >>> # DISABLE_DOCTEST
        >>> from ibeis.web.zmq_task_queue import *  # NOQA
        >>> result = ensure_simple_server()
        >>> print(result)
    """
    if not ut.is_local_port_open(port):
        # Another server already owns the port: hand back a dummy handle
        # with a no-op terminate2 so callers can treat both cases alike.
        bgserver = ut.DynStruct()
        bgserver.terminate2 = lambda: None
        print('server is running elsewhere')
        return bgserver
    return ut.spawn_background_process(ut.start_simple_webserver, port=port)
Example #9
0
def ensure_simple_server(port=5832):
    r"""Ensure a simple webserver is available on ``port``.

    CommandLine:
        python -m wbia.web.apis_engine --exec-ensure_simple_server
        python -m utool.util_web --exec-start_simple_webserver

    Example:
        >>> # DISABLE_DOCTEST
        >>> from wbia.web.apis_engine import *  # NOQA
        >>> result = ensure_simple_server()
        >>> print(result)
    """
    if ut.is_local_port_open(port):
        # Port is available: launch our own webserver in the background.
        return ut.spawn_background_process(ut.start_simple_webserver, port=port)
    # Port is taken: return a stand-in object whose terminate2 is a no-op
    # so callers can shut it down unconditionally.
    bgserver = ut.DynStruct()
    bgserver.terminate2 = lambda: None
    logger.info('server is running elsewhere')
    return bgserver