Example #1
def refine_akmeans(data, datax2_clusterx, centroids, max_iters=5,
                   flann_params={}, cache_dir=None, cfgstr='',
                   use_data_hash=True, akmeans_cfgstr=None):
    """ Refines the approximates centroids """
    print('[akmeans.precompute] refining:')
    if akmeans_cfgstr is None:
        akmeans_cfgstr = nn.get_flann_cfgstr(data, flann_params, cfgstr, use_data_hash)
    datax2_clusterx_old = datax2_clusterx
    (datax2_clusterx, centroids) = _akmeans_iterate(data, centroids, datax2_clusterx_old, max_iters, flann_params, 0, 10)
    ut.save_cache(cache_dir, CLUSTERS_FNAME, akmeans_cfgstr, centroids)
    ut.save_cache(cache_dir, DATAX2CL_FNAME, akmeans_cfgstr, datax2_clusterx)
    return (datax2_clusterx, centroids)
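
The pattern behind this function (and Example #5 below, which produces the inputs it refines) is a load-or-compute cache keyed by a config string. A minimal sketch of that pattern, assuming only what these examples show: that ut.load_cache(dpath, fname, cfgstr) raises IOError on a miss and ut.save_cache(dpath, fname, cfgstr, data) writes the entry.

import utool as ut

def cached_compute(dpath, fname, cfgstr, compute_fn):
    """ Return the cached result for cfgstr, or compute and cache it. """
    try:
        # Hit: an entry for this (fname, cfgstr) pair already exists on disk
        data = ut.load_cache(dpath, fname, cfgstr)
    except IOError:
        # Miss: do the real work once, then persist it for the next call
        data = compute_fn()
        ut.save_cache(dpath, fname, cfgstr, data)
    return data
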
Example #2
def process_query_request(ibs, qreq,
                          safe=True,
                          use_cache=USE_CACHE,
                          use_bigcache=USE_BIGCACHE):
    """
    The standard query interface.
    INPUT:
        ibs  - ibeis control object
        qreq - query request object (should be the same as ibs.qreq)
    Checks a big cache for qaid2_qres.
    If cache miss, tries to load each qres individually.
    On an individual cache miss, it performs the query. """
    if utool.NOT_QUIET:
        print(' --- Process QueryRequest --- ')
    if len(qreq.qaids) <= 1:
        # Do not use bigcache single queries
        use_bigcache = False
    # Try and load directly from a big cache
    if use_bigcache:
        bigcache_dpath = qreq.bigcachedir
        bigcache_fname = (ibs.get_dbname() + '_QRESMAP' +
                          qreq.get_qaids_hashid() + qreq.get_daids_hashid())
        bigcache_cfgstr = qreq.cfg.get_cfgstr()
    if use_cache and use_bigcache:
        try:
            qaid2_qres = utool.load_cache(bigcache_dpath,
                                          bigcache_fname,
                                          bigcache_cfgstr)
            print('... qaid2_qres bigcache hit')
            return qaid2_qres
        except IOError:
            print('... qaid2_qres bigcache miss')
    # Try loading as many cached results as possible
    if use_cache:
        qaid2_qres, failed_qaids = mf.try_load_resdict(qreq)
    else:
        qaid2_qres = {}
        failed_qaids = qreq.qaids

    # Execute and save queries
    if len(failed_qaids) > 0:
        if safe:
            # FIXME: Ugg, this part is dirty
            qreq = pre_exec_checks(ibs, qreq)
        computed_qaid2_qres = execute_query_and_save_L1(ibs, qreq, failed_qaids)
        qaid2_qres.update(computed_qaid2_qres)  # Update cached results
    if use_bigcache:
        utool.save_cache(bigcache_dpath,
                         bigcache_fname,
                         bigcache_cfgstr, qaid2_qres)
    return qaid2_qres
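
The big cache above is addressed by three parts: a directory, a filename derived from the database name plus the query/data hash ids, and a cfgstr taken from the query config, so any parameter change that feeds the cfgstr invalidates only the matching entry. A rough illustration of that keying, reusing ut.get_app_resource_dir from Example #8 below and assuming the cfgstr participates in the on-disk cache key (which the hit/miss behavior above relies on):

import utool as ut

cachedir = ut.get_app_resource_dir('utool')
ut.save_cache(cachedir, 'demo_fname', 'cfg_v1', {'k': 10})
ut.save_cache(cachedir, 'demo_fname', 'cfg_v2', {'k': 50})
# Same fname, different cfgstr: two independent entries, so results computed
# under old parameters can never be returned for new ones.
assert ut.load_cache(cachedir, 'demo_fname', 'cfg_v1') == {'k': 10}
assert ut.load_cache(cachedir, 'demo_fname', 'cfg_v2') == {'k': 50}
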
Example #3
def process_query_request(ibs,
                          qreq,
                          safe=True,
                          use_cache=USE_CACHE,
                          use_bigcache=USE_BIGCACHE):
    """
    The standard query interface.
    INPUT:
        ibs  - ibeis control object
        qreq - query request object (should be the same as ibs.qreq)
    Checks a big cache for qaid2_qres.
    If cache miss, tries to load each qres individually.
    On an individual cache miss, it performs the query. """
    if utool.NOT_QUIET:
        print(' --- Process QueryRequest --- ')
    if len(qreq.qaids) <= 1:
        # Do not use bigcache single queries
        use_bigcache = False
    # Try and load directly from a big cache
    if use_bigcache:
        bigcache_dpath = qreq.bigcachedir
        bigcache_fname = (ibs.get_dbname() + '_QRESMAP' +
                          qreq.get_qaids_hashid() + qreq.get_daids_hashid())
        bigcache_cfgstr = qreq.cfg.get_cfgstr()
    if use_cache and use_bigcache:
        try:
            qaid2_qres = utool.load_cache(bigcache_dpath, bigcache_fname,
                                          bigcache_cfgstr)
            print('... qaid2_qres bigcache hit')
            return qaid2_qres
        except IOError:
            print('... qaid2_qres bigcache miss')
    # Try loading as many cached results as possible
    if use_cache:
        qaid2_qres, failed_qaids = mf.try_load_resdict(qreq)
    else:
        qaid2_qres = {}
        failed_qaids = qreq.qaids

    # Execute and save queries
    if len(failed_qaids) > 0:
        if safe:
            # FIXME: Ugg, this part is dirty
            qreq = pre_exec_checks(ibs, qreq)
        computed_qaid2_qres = execute_query_and_save_L1(
            ibs, qreq, failed_qaids)
        qaid2_qres.update(computed_qaid2_qres)  # Update cached results
    if use_bigcache:
        utool.save_cache(bigcache_dpath, bigcache_fname, bigcache_cfgstr,
                         qaid2_qres)
    return qaid2_qres
Example #4
def execute_bulk(qreq_):
    # Do not use bulk single queries
    bulk_on = qreq_.use_bulk_cache and len(qreq_.qaids) > qreq_.min_bulk_size
    if bulk_on:
        # Try and load directly from a big cache
        bc_dpath = ut.ensuredir((qreq_.cachedir, 'bulk_mc5'))
        bc_fname = 'bulk_mc5_' + '_'.join(qreq_.get_nice_parts())
        bc_cfgstr = qreq_.get_cfgstr(with_input=True)
        try:
            cm_list = ut.load_cache(bc_dpath, bc_fname, bc_cfgstr)
            logger.info('... bulk cache hit %r/%r' % (len(qreq_), len(qreq_)))
        except (IOError, AttributeError):
            # Fallback to smallcache
            cm_list = execute_singles(qreq_)
            ut.save_cache(bc_dpath, bc_fname, bc_cfgstr, cm_list)
    else:
        # Fallback to smallcache
        cm_list = execute_singles(qreq_)
    return cm_list
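
execute_bulk layers a coarse cache over a finer one: a single bulk entry keyed by the full input cfgstr, with execute_singles (which handles per-query caching) as the fallback on any miss. The same shape reduced to a skeleton, a sketch that assumes the utool cache API shown above and a caller-supplied compute_one:

import utool as ut

def two_level_cached(dpath, fname, cfgstr, items, compute_one):
    """ Bulk cache over a per-item path, mirroring execute_bulk's structure. """
    try:
        # Coarse level: one entry covering the whole bundle of items
        results = ut.load_cache(dpath, fname, cfgstr)
    except (IOError, AttributeError):
        # Fallback: per-item path; each item may still hit its own smaller cache
        results = [compute_one(item) for item in items]
        # Repopulate the bulk entry so the next identical request is one read
        ut.save_cache(dpath, fname, cfgstr, results)
    return results
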
Example #5
def precompute_akmeans(data, num_clusters, max_iters=5, flann_params={},
                       cache_dir=None, force_recomp=False, use_data_hash=True,
                       cfgstr='', refine=False, akmeans_cfgstr=None):
    """ precompute aproximate kmeans with builtin caching """
    print('[akmeans] pre_akmeans()')
    # filename prefix constants
    assert cache_dir is not None, 'choose a cache directory'
    # Build a cfgstr if the full one is not specified
    if akmeans_cfgstr is None:
        # compute a hashstr based on the data
        akmeans_cfgstr = nn.get_flann_cfgstr(data, flann_params, cfgstr, use_data_hash)
    try:
        # Try and load a previous clustering
        if force_recomp:
            raise UserWarning('forcing recompute')
        centroids        = ut.load_cache(cache_dir, CLUSTERS_FNAME, akmeans_cfgstr)
        datax2_clusterx = ut.load_cache(cache_dir, DATAX2CL_FNAME, akmeans_cfgstr)
        print('[akmeans.precompute] load successful')
        if refine:
            # Refines the cluster centers if specified
            (datax2_clusterx, centroids) =\
                refine_akmeans(data, datax2_clusterx, centroids,
                               max_iters=max_iters, flann_params=flann_params,
                               cache_dir=cache_dir, akmeans_cfgstr=akmeans_cfgstr)
        return (datax2_clusterx, centroids)
    except IOError as ex:
        ut.printex(ex, 'cache miss', iswarning=True)
    except UserWarning:
        pass
    # First time computation
    print('[akmeans.precompute] pre_akmeans(): calling akmeans')
    (datax2_clusterx, centroids) = akmeans(data, num_clusters, max_iters, flann_params)
    print('[akmeans.precompute] save and return')
    ut.save_cache(cache_dir, CLUSTERS_FNAME, akmeans_cfgstr, centroids)
    ut.save_cache(cache_dir, DATAX2CL_FNAME, akmeans_cfgstr, datax2_clusterx)
    return (datax2_clusterx, centroids)
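
The force_recomp branch is worth noting: raising UserWarning before the cache read and swallowing it below sends control straight to the fresh-computation path, while IOError still reports genuine misses. The same idiom in isolation, assuming the same utool API:

import utool as ut

def load_or_recompute(cache_dir, fname, cfgstr, compute_fn, force_recomp=False):
    try:
        if force_recomp:
            # Deliberately skip the cache read; caught by the bare pass below
            raise UserWarning('forcing recompute')
        return ut.load_cache(cache_dir, fname, cfgstr)
    except IOError as ex:
        ut.printex(ex, 'cache miss', iswarning=True)
    except UserWarning:
        pass
    data = compute_fn()
    ut.save_cache(cache_dir, fname, cfgstr, data)
    return data
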
Example #6
def submit_query_request(ibs,
                         qaid_list,
                         daid_list,
                         use_cache=None,
                         use_bigcache=None,
                         cfgdict=None,
                         qreq_=None,
                         verbose=None,
                         save_qcache=None,
                         prog_hook=None):
    """
    The standard query interface.

    TODO: rename use_cache to use_qcache

    Checks a big cache for qaid2_cm.  If cache miss, tries to load each cm
    individually.  On an individual cache miss, it performs the query.

    Args:
        ibs (ibeis.IBEISController) : ibeis control object
        qaid_list (list): query annotation ids
        daid_list (list): database annotation ids
        use_cache (bool):
        use_bigcache (bool):

    Returns:
        qaid2_cm (dict): dict of QueryResult objects

    CommandLine:
        python -m ibeis.algo.hots.match_chips4 --test-submit_query_request

    Examples:
        >>> # SLOW_DOCTEST
        >>> from ibeis.algo.hots.match_chips4 import *  # NOQA
        >>> import ibeis
        >>> qaid_list = [1]
        >>> daid_list = [1, 2, 3, 4, 5]
        >>> use_bigcache = True
        >>> use_cache = True
        >>> ibs = ibeis.opendb(db='testdb1')
        >>> qreq_ = ibs.new_query_request(qaid_list, daid_list, cfgdict={}, verbose=True)
        >>> qaid2_cm = submit_query_request(ibs, qaid_list, daid_list, use_cache, use_bigcache, qreq_=qreq_)
    """
    # Get flag defaults if necessary
    if verbose is None:
        verbose = pipeline.VERB_PIPELINE
    if use_cache is None:
        use_cache = USE_CACHE
    if save_qcache is None:
        save_qcache = SAVE_CACHE
    if use_bigcache is None:
        use_bigcache = USE_BIGCACHE
    # Create new query request object to store temporary state
    if verbose:
        #print('[mc4] --- Submit QueryRequest_ --- ')
        ut.colorprint('[mc4] --- Submit QueryRequest_ --- ', 'darkyellow')
    assert qreq_ is not None, 'query request must be prebuilt'

    qreq_.prog_hook = prog_hook
    # --- BIG CACHE ---
    # Do not use bigcache single queries
    use_bigcache_ = (use_bigcache and use_cache
                     and len(qaid_list) > MIN_BIGCACHE_BUNDLE)
    if (use_bigcache_ or save_qcache) and len(qaid_list) > MIN_BIGCACHE_BUNDLE:
        bc_dpath = ibs.get_big_cachedir()
        # TODO: SYSTEM : semantic should only be used if name scoring is on
        #qhashid = qreq_.get_data_hashid()
        #dhashid = qreq_.get_query_hashid()
        #pipe_hashstr = qreq_.get_pipe_hashid()
        #bc_fname = ''.join((ibs.get_dbname(), '_QRESMAP', qhashid, dhashid, pipe_hashstr))
        #bc_fname = ''.join((ibs.get_dbname(), '_BIG_MC4_CM', qhashid, dhashid, pipe_hashstr))
        bc_fname = 'BIG_MC4_' + qreq_.get_shortinfo_cfgstr()
        #bc_cfgstr = ibs.cfg.query_cfg.get_cfgstr()  # FIXME, rectify w/ qparams
        bc_cfgstr = qreq_.get_full_cfgstr()
        if use_bigcache_:
            # Try and load directly from a big cache
            try:
                qaid2_cm = ut.load_cache(bc_dpath, bc_fname, bc_cfgstr)
                cm_list = [qaid2_cm[qaid] for qaid in qaid_list]
            except (IOError, AttributeError):
                pass
            else:
                return cm_list
    # ------------
    # Execute query request
    qaid2_cm = execute_query_and_save_L1(ibs,
                                         qreq_,
                                         use_cache,
                                         save_qcache,
                                         verbose=verbose)
    # ------------
    if save_qcache and len(qaid_list) > MIN_BIGCACHE_BUNDLE:
        ut.save_cache(bc_dpath, bc_fname, bc_cfgstr, qaid2_cm)

    cm_list = [qaid2_cm[qaid] for qaid in qaid_list]
    return cm_list
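
Two separate predicates gate the big cache in this example: use_bigcache_ controls reads, while save_qcache together with the bundle-size check controls writes, so a run that skips reading can still populate the cache for later. A condensed view of that gating, with a hypothetical threshold standing in for MIN_BIGCACHE_BUNDLE (whose real value lives in the module):

MIN_BIGCACHE_BUNDLE = 64  # hypothetical; stands in for the module constant

def bigcache_gates(use_bigcache, use_cache, save_qcache, num_qaids):
    # Read only when both cache flags are on and the bundle is large enough
    read_big = use_bigcache and use_cache and num_qaids > MIN_BIGCACHE_BUNDLE
    # Write whenever saving is requested for a large bundle, even on a no-read run
    write_big = save_qcache and num_qaids > MIN_BIGCACHE_BUNDLE
    return read_big, write_big
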
Example #7
def submit_query_request(ibs, qaid_list, daid_list, use_cache=None,
                         use_bigcache=None, cfgdict=None, qreq_=None,
                         verbose=pipeline.VERB_PIPELINE, save_qcache=None,
                         prog_hook=None):
    """
    The standard query interface.

    TODO: rename use_cache to use_qcache

    Checks a big cache for qaid2_cm.  If cache miss, tries to load each cm
    individually.  On an individual cache miss, it performs the query.

    Args:
        ibs (IBEISController) : ibeis control object
        qaid_list (list): query annotation ids
        daid_list (list): database annotation ids
        use_cache (bool):
        use_bigcache (bool):

    Returns:
        qaid2_cm (dict): dict of QueryResult objects

    CommandLine:
        python -m ibeis.algo.hots.match_chips4 --test-submit_query_request

    Examples:
        >>> # SLOW_DOCTEST
        >>> from ibeis.algo.hots.match_chips4 import *  # NOQA
        >>> import ibeis
        >>> qaid_list = [1]
        >>> daid_list = [1, 2, 3, 4, 5]
        >>> use_bigcache = True
        >>> use_cache = True
        >>> ibs = ibeis.opendb(db='testdb1')
        >>> qreq_ = ibs.new_query_request(qaid_list, daid_list, cfgdict={}, verbose=True)
        >>> qaid2_cm = submit_query_request(ibs, qaid_list, daid_list, use_cache, use_bigcache, qreq_=qreq_)
    """
    # Get flag defaults if necessary
    if use_cache is None:
        use_cache = USE_CACHE
    if save_qcache is None:
        save_qcache = SAVE_CACHE
    if use_bigcache is None:
        use_bigcache = USE_BIGCACHE
    # Create new query request object to store temporary state
    if verbose:
        print(' --- Submit QueryRequest_ --- ')
    assert qreq_ is not None, 'query request must be prebuilt'

    qreq_.prog_hook = prog_hook
    # --- BIG CACHE ---
    # Do not use bigcache single queries
    use_bigcache_ = (use_bigcache and use_cache and
                     len(qaid_list) > MIN_BIGCACHE_BUNDLE)
    if (use_bigcache_ or save_qcache) and len(qaid_list) > MIN_BIGCACHE_BUNDLE:
        bc_dpath = ibs.get_big_cachedir()
        # TODO: SYSTEM : semantic should only be used if name scoring is on
        qhashid = ibs.get_annot_hashid_semantic_uuid(qaid_list, prefix='Q')
        dhashid = ibs.get_annot_hashid_semantic_uuid(daid_list, prefix='D')
        pipe_hashstr = qreq_.get_pipe_hashid()
        #bc_fname = ''.join((ibs.get_dbname(), '_QRESMAP', qhashid, dhashid, pipe_hashstr))
        bc_fname = ''.join((ibs.get_dbname(), '_BIG_CM', qhashid, dhashid, pipe_hashstr))
        bc_cfgstr = ibs.cfg.query_cfg.get_cfgstr()  # FIXME, rectify w/ qparams
        if use_bigcache_:
            # Try and load directly from a big cache
            try:
                qaid2_cm = ut.load_cache(bc_dpath, bc_fname, bc_cfgstr)
            except (IOError, AttributeError):
                pass
            else:
                return qaid2_cm
    # ------------
    # Execute query request
    qaid2_cm = execute_query_and_save_L1(ibs, qreq_, use_cache, save_qcache, verbose=verbose)
    # ------------
    if save_qcache and len(qaid_list) > MIN_BIGCACHE_BUNDLE:
        ut.save_cache(bc_dpath, bc_fname, bc_cfgstr, qaid2_cm)
    return qaid2_cm
Example #8
def get_unofficial_package_hrefs(nocache=None):
    """
    Downloads the entire webpage of available hrefs, or returns a cached copy
    """
    if nocache is None:
        nocache = FORCE

    cachedir = ut.get_app_resource_dir('utool')
    try:
        if nocache:
            raise Exception('cachemiss')
        all_href_list = ut.load_cache(cachedir, 'win32_hrefs', 'all_href_list')
        page_str      = ut.load_cache(cachedir, 'win32_hrefs', 'page_str')
        print('all_href_list cache hit')
        return all_href_list, page_str
    except Exception:
        print('all_href_list cache miss')
        pass
    # Read page html
    headers = { 'User-Agent' : 'Mozilla/5.0' }
    print('Sending request to %r' % (UNOFFICIAL_WEBURL,))
    req = urllib2.Request(UNOFFICIAL_WEBURL, None, headers)
    page = urllib2.urlopen(req)
    page_str = page.read()
    encrypted_lines = list(filter(lambda x: x.find('onclick') > -1, page_str.split('\n')))

    print('Read %d encrypted lines ' % len(encrypted_lines))
    # List of all download links, now choose wisely, because we don't want
    # to hack for evil
    #line = encrypted_lines[0]
    def parse_encrypted(line):
        """
        <script type="text/javascript">
        // <![CDATA[
        if (top.location!=location) top.location.href=location.href;
        function dc(ml,mi){
            var ot="";
            for(var j=0;j<mi.length;j++)
                ot+=String.fromCharCode(ml[mi.charCodeAt(j)-48]);
            document.write(ot);
            }
        function dl1(ml,mi){
            var ot="";
            for(var j=0;j<mi.length;j++)
                ot+=String.fromCharCode(ml[mi.charCodeAt(j)-48]);
            location.href=ot;
            }
        function dl(ml,mi){
        mi=mi.replace('&lt;','<');
        mi=mi.replace('&gt;','>');
        mi=mi.replace('&amp;','&');
        setTimeout(function(){ dl1(ml,mi) }, 1500);}
        // ]]>
        </script>
        #start = line.find('javascript:dl') + len('javascript:dl') + 2
        #end   = line.find('title') - 4
        #code = line[start: end]
        #mid = code.find(']')
        #left = code[0:mid]
        #right = code[mid + 4:]
        #ml = left
        #mi = right
        """
        _, ml, mi, _ = parse.parse('{}javascript:dl([{}], "{}"){}', line)
        mi_ = mi.replace('&lt;', '<').replace('&gt;', '>').replace('&amp;', '&')

        #ml_ = eval('[' + ml + ']')
        ml_ = eval(ml)
        href_ = ''.join([chr(ml_[ord(michar) - 48]) for michar in mi_])
        href  = ''.join([UNOFFICIAL_WEBURL, href_])
        return href
    all_href_list = list(map(parse_encrypted, encrypted_lines))
    print('decrypted %d lines' % (len(all_href_list)))
    ut.save_cache(cachedir, 'win32_hrefs', 'all_href_list', all_href_list)
    ut.save_cache(cachedir, 'win32_hrefs', 'page_str', page_str)
    return all_href_list, page_str
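
parse_encrypted reverses the page's JavaScript obfuscation: ml is a table of character codes and mi is an index string, offset by 48 (the code point of '0'). Since eval runs on text scraped from a remote page, and ml is only ever a comma-separated list of integers here, ast.literal_eval is a safer stand-in; a sketch with a hypothetical decode_href helper:

import ast

def decode_href(ml_text, mi_text, base_url):
    """ Decode one obfuscated link: ml_text is the code table, mi_text the
    index string. """
    # literal_eval parses '104,105,...' into a tuple of ints without
    # executing arbitrary code the way eval would
    ml = ast.literal_eval(ml_text)
    href_ = ''.join(chr(ml[ord(ch) - 48]) for ch in mi_text)
    return base_url + href_

# decode_href('104,105', '01', 'http://example.invalid/') -> 'http://example.invalid/hi'
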
Example #9
def make_single_testres(
    ibs,
    qaids,
    daids,
    pipecfg_list,
    cfgx2_lbl,
    cfgdict_list,
    lbl,
    testnameid,
    use_cache=None,
    subindexer_partial=ut.ProgIter,
):
    """
    CommandLine:
        python -m wbia run_expt
    """
    cfgslice = None
    if cfgslice is not None:
        pipecfg_list = pipecfg_list[cfgslice]

    dbname = ibs.get_dbname()

    # if ut.NOT_QUIET:
    #     logger.info('[harn] Make single testres')

    cfgx2_qreq_ = [
        ibs.new_query_request(qaids, daids, verbose=False, query_cfg=pipe_cfg)
        for pipe_cfg in ut.ProgIter(
            pipecfg_list, lbl='Building qreq_', enabled=False)
    ]

    if use_cache is None:
        use_cache = USE_BIG_TEST_CACHE

    if use_cache:
        try:
            bt_cachedir = ut.ensuredir(
                (ibs.get_cachedir(), 'BULK_TEST_CACHE2'))
            cfgstr_list = [
                qreq_.get_cfgstr(with_input=True) for qreq_ in cfgx2_qreq_
            ]
            bt_cachestr = ut.hashstr_arr27(cfgstr_list,
                                           ibs.get_dbname() + '_cfgs')
            bt_cachename = 'BULKTESTCACHE2_v2'
            testres = ut.load_cache(bt_cachedir, bt_cachename, bt_cachestr)
            testres.cfgdict_list = cfgdict_list
            testres.cfgx2_lbl = cfgx2_lbl  # hack override
        except IOError:
            pass
        else:
            if ut.NOT_QUIET:
                ut.colorprint('[harn] single testres cache hit... returning',
                              'brightcyan')
            return testres

    if ibs.table_cache:
        # HACK
        prev_feat_cfgstr = None

    cfgx2_cmsinfo = []
    cfgiter = subindexer_partial(range(len(cfgx2_qreq_)),
                                 lbl='pipe config',
                                 freq=1,
                                 adjust=False)
    # Run each pipeline configuration
    for cfgx in cfgiter:
        qreq_ = cfgx2_qreq_[cfgx]
        cprint = ut.colorprint
        cprint('testnameid=%r' % (testnameid, ), 'green')
        cprint(
            'annot_cfgstr = %s' %
            (qreq_.get_cfgstr(with_input=True, with_pipe=False), ),
            'yellow',
        )
        cprint('pipe_cfgstr= %s' % (qreq_.get_cfgstr(with_data=False), ),
               'brightcyan')
        cprint('pipe_hashstr = %s' % (qreq_.get_pipe_hashid(), ), 'cyan')
        if DRY_RUN:
            continue

        indent_prefix = '[%s cfg %d/%d]' % (
            dbname,
            # cfgiter.count (doesn't work when quiet)
            (cfgiter.parent_index * cfgiter.length) + cfgx,
            cfgiter.length * cfgiter.parent_length,
        )

        with ut.Indenter(indent_prefix):
            # Run the test / read cache
            _need_compute = True
            if use_cache:
                # smaller cache for individual configuration runs
                st_cfgstr = qreq_.get_cfgstr(with_input=True)
                st_cachedir = ut.unixjoin(bt_cachedir, 'small_tests')
                st_cachename = 'smalltest'
                ut.ensuredir(st_cachedir)
                try:
                    cmsinfo = ut.load_cache(st_cachedir, st_cachename,
                                            st_cfgstr)
                except IOError:
                    _need_compute = True
                else:
                    _need_compute = False
            if _need_compute:
                assert not ibs.table_cache
                if ibs.table_cache:
                    if (prev_feat_cfgstr is not None
                            and prev_feat_cfgstr != qreq_.qparams.feat_cfgstr):
                        # Clear features to preserve memory
                        ibs.clear_table_cache()
                        # qreq_.ibs.print_cachestats_str()
                cm_list = qreq_.execute()
                cmsinfo = test_result.build_cmsinfo(cm_list, qreq_)
                # record previous feature configuration
                if ibs.table_cache:
                    prev_feat_cfgstr = qreq_.qparams.feat_cfgstr
                if use_cache:
                    ut.save_cache(st_cachedir, st_cachename, st_cfgstr,
                                  cmsinfo)
        if not NOMEMORY:
            # Store the results
            cfgx2_cmsinfo.append(cmsinfo)
        else:
            cfgx2_qreq_[cfgx] = None
    if ut.NOT_QUIET:
        ut.colorprint('[harn] Completed running test configurations', 'white')
    if DRY_RUN:
        logger.info('ran tests dryrun mode.')
        return
    if NOMEMORY:
        logger.info('ran tests in memory savings mode. Cannot Print. exiting')
        return
    # Store all pipeline config results in a test result object
    testres = test_result.TestResult(pipecfg_list, cfgx2_lbl, cfgx2_cmsinfo,
                                     cfgx2_qreq_)
    testres.testnameid = testnameid
    testres.lbl = lbl
    testres.cfgdict_list = cfgdict_list
    testres.aidcfg = None
    if use_cache:
        try:
            ut.save_cache(bt_cachedir, bt_cachename, bt_cachestr, testres)
        except Exception as ex:
            ut.printex(ex, 'error saving testres cache', iswarning=True)
            if ut.SUPER_STRICT:
                raise
    return testres
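
The bundle-level key here hashes the full list of per-config cfgstrs (ut.hashstr_arr27), so adding, removing, or reordering any pipeline configuration yields a different bundle entry, while the nested small_tests cache still lets unchanged individual configs hit. A sketch of the same keying idea using only the standard library, since hashstr_arr27 is utool-specific:

import hashlib

def bundle_cachestr(cfgstr_list, prefix):
    """ Collapse an ordered list of cfgstrs into one bundle-level cache key. """
    h = hashlib.sha1()
    for cfgstr in cfgstr_list:
        h.update(cfgstr.encode('utf-8'))
        h.update(b'\x00')  # separator so ['ab', 'c'] != ['a', 'bc']
    return prefix + '_' + h.hexdigest()[:16]

# bundle_cachestr(cfgstr_list, ibs.get_dbname() + '_cfgs') plays the role of
# bt_cachestr above.
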
Example #10
def make_single_testres(ibs,
                        qaids,
                        daids,
                        pipecfg_list,
                        cfgx2_lbl,
                        cfgdict_list,
                        lbl,
                        testnameid,
                        use_cache=None,
                        subindexer_partial=ut.ProgressIter):
    """
    CommandLine:
        python -m ibeis.expt.harness --exec-run_test_configurations2
    """
    cfgslice = None
    if cfgslice is not None:
        pipecfg_list = pipecfg_list[cfgslice]

    dbname = ibs.get_dbname()

    if ut.NOT_QUIET:
        print('[harn] Make single testres')

    cfgx2_qreq_ = [
        ibs.new_query_request(qaids, daids, verbose=False, query_cfg=pipe_cfg)
        for pipe_cfg in ut.ProgressIter(
            pipecfg_list, lbl='Building qreq_', enabled=False)
    ]

    if use_cache is None:
        use_cache = USE_BIG_TEST_CACHE

    if use_cache:
        get_big_test_cache_info(ibs, cfgx2_qreq_)
        try:
            cachetup = get_big_test_cache_info(ibs, cfgx2_qreq_)
            testres = ut.load_cache(*cachetup)
            testres.cfgdict_list = cfgdict_list
            testres.cfgx2_lbl = cfgx2_lbl  # hack override
        except IOError:
            pass
        else:
            if ut.NOT_QUIET:
                ut.colorprint('[harn] single testres cache hit... returning',
                              'turquoise')
            return testres

    if ibs.table_cache:
        # HACK
        prev_feat_cfgstr = None

    cfgx2_cfgresinfo = []
    #nPipeCfg = len(pipecfg_list)
    cfgiter = subindexer_partial(range(len(cfgx2_qreq_)),
                                 lbl='query config',
                                 freq=1,
                                 adjust=False,
                                 separate=True)
    # Run each pipeline configuration
    for cfgx in cfgiter:
        qreq_ = cfgx2_qreq_[cfgx]

        ut.colorprint('testnameid=%r' % (testnameid, ), 'green')
        ut.colorprint(
            'annot_cfgstr = %s' %
            (qreq_.get_cfgstr(with_input=True, with_pipe=False), ), 'yellow')
        ut.colorprint(
            'pipe_cfgstr= %s' % (qreq_.get_cfgstr(with_data=False), ),
            'turquoise')
        ut.colorprint('pipe_hashstr = %s' % (qreq_.get_pipe_hashid(), ),
                      'teal')
        if DRY_RUN:
            continue

        indent_prefix = '[%s cfg %d/%d]' % (
            dbname,
            # cfgiter.count (doesn't work when quiet)
            (cfgiter.parent_index * cfgiter.nTotal) + cfgx,
            cfgiter.nTotal * cfgiter.parent_nTotal)

        with ut.Indenter(indent_prefix):
            # Run the test / read cache
            _need_compute = True
            if use_cache:
                # smaller cache for individual configuration runs
                st_cfgstr = qreq_.get_cfgstr(with_input=True)
                bt_cachedir = cachetup[0]
                st_cachedir = ut.unixjoin(bt_cachedir, 'small_tests')
                st_cachename = 'smalltest'
                ut.ensuredir(st_cachedir)
                try:
                    cfgres_info = ut.load_cache(st_cachedir, st_cachename,
                                                st_cfgstr)
                except IOError:
                    _need_compute = True
                else:
                    _need_compute = False
            if _need_compute:
                assert not ibs.table_cache
                if ibs.table_cache:
                    if (prev_feat_cfgstr is not None and
                            prev_feat_cfgstr != qreq_.qparams.feat_cfgstr):
                        # Clear features to preserve memory
                        ibs.clear_table_cache()
                        #qreq_.ibs.print_cachestats_str()
                cfgres_info = get_query_result_info(qreq_)
                # record previous feature configuration
                if ibs.table_cache:
                    prev_feat_cfgstr = qreq_.qparams.feat_cfgstr
                if use_cache:
                    ut.save_cache(st_cachedir, st_cachename, st_cfgstr,
                                  cfgres_info)
        if not NOMEMORY:
            # Store the results
            cfgx2_cfgresinfo.append(cfgres_info)
        else:
            cfgx2_qreq_[cfgx] = None
    if ut.NOT_QUIET:
        ut.colorprint('[harn] Completed running test configurations', 'white')
    if DRY_RUN:
        print('ran tests dryrun mode.')
        return
    if NOMEMORY:
        print('ran tests in memory savings mode. Cannot Print. exiting')
        return
    # Store all pipeline config results in a test result object
    testres = test_result.TestResult(pipecfg_list, cfgx2_lbl, cfgx2_cfgresinfo,
                                     cfgx2_qreq_)
    testres.testnameid = testnameid
    testres.lbl = lbl
    testres.cfgdict_list = cfgdict_list
    testres.aidcfg = None
    if use_cache:
        try:
            ut.save_cache(*tuple(list(cachetup) + [testres]))
        except Exception as ex:
            ut.printex(ex, 'error saving testres cache', iswarning=True)
            if ut.SUPER_STRICT:
                raise
    return testres
Example #11
def get_unofficial_package_hrefs(nocache=None):
    """
    Downloads the entire webpage of available hrefs, or returns a cached copy
    """
    if nocache is None:
        nocache = FORCE

    cachedir = ut.get_app_resource_dir('utool')
    try:
        if nocache:
            raise Exception('cachemiss')
        all_href_list = ut.load_cache(cachedir, 'win32_hrefs', 'all_href_list')
        page_str = ut.load_cache(cachedir, 'win32_hrefs', 'page_str')
        print('all_href_list cache hit')
        return all_href_list, page_str
    except Exception:
        print('all_href_list cache miss')
        pass
    # Read page html
    headers = {'User-Agent': 'Mozilla/5.0'}
    print('Sending request to %r' % (UNOFFICIAL_WEBURL, ))
    req = urllib2.Request(UNOFFICIAL_WEBURL, None, headers)
    page = urllib2.urlopen(req)
    page_str = page.read()
    encrypted_lines = list(
        filter(lambda x: x.find('onclick') > -1, page_str.split('\n')))

    print('Read %d encrypted lines ' % len(encrypted_lines))

    # List of all download links, now choose wisely, because we don't want
    # to hack for evil
    #line = encrypted_lines[0]
    def parse_encrypted(line):
        """
        <script type="text/javascript">
        // <![CDATA[
        if (top.location!=location) top.location.href=location.href;
        function dc(ml,mi){
            var ot="";
            for(var j=0;j<mi.length;j++)
                ot+=String.fromCharCode(ml[mi.charCodeAt(j)-48]);
            document.write(ot);
            }
        function dl1(ml,mi){
            var ot="";
            for(var j=0;j<mi.length;j++)
                ot+=String.fromCharCode(ml[mi.charCodeAt(j)-48]);
            location.href=ot;
            }
        function dl(ml,mi){
        mi=mi.replace('&lt;','<');
        mi=mi.replace('&gt;','>');
        mi=mi.replace('&amp;','&');
        setTimeout(function(){ dl1(ml,mi) }, 1500);}
        // ]]>
        </script>
        #start = line.find('javascript:dl') + len('javascript:dl') + 2
        #end   = line.find('title') - 4
        #code = line[start: end]
        #mid = code.find(']')
        #left = code[0:mid]
        #right = code[mid + 4:]
        #ml = left
        #mi = right
        """
        _, ml, mi, _ = parse.parse('{}javascript:dl([{}], "{}"){}', line)
        mi_ = mi.replace('&lt;', '<').replace('&gt;',
                                              '>').replace('&amp;', '&')

        #ml_ = eval('[' + ml + ']')
        ml_ = eval(ml)
        href_ = ''.join([chr(ml_[ord(michar) - 48]) for michar in mi_])
        href = ''.join([UNOFFICIAL_WEBURL, href_])
        return href

    all_href_list = list(map(parse_encrypted, encrypted_lines))
    print('decrypted %d lines' % (len(all_href_list)))
    ut.save_cache(cachedir, 'win32_hrefs', 'all_href_list', all_href_list)
    ut.save_cache(cachedir, 'win32_hrefs', 'page_str', page_str)
    return all_href_list, page_str
Example #12
def make_single_testres(ibs, qaids, daids, pipecfg_list, cfgx2_lbl,
                        cfgdict_list, lbl, testnameid, use_cache=None,
                        subindexer_partial=ut.ProgressIter):
    """
    CommandLine:
        python -m ibeis.expt.harness --exec-run_test_configurations2
    """
    cfgslice = None
    if cfgslice is not None:
        pipecfg_list = pipecfg_list[cfgslice]

    dbname = ibs.get_dbname()

    if ut.NOT_QUIET:
        print('[harn] Make single testres')

    cfgx2_qreq_ = [
        ibs.new_query_request(qaids, daids, verbose=False, query_cfg=pipe_cfg)
        for pipe_cfg in ut.ProgressIter(pipecfg_list, lbl='Building qreq_',
                                        enabled=False)
    ]

    if use_cache is None:
        use_cache = USE_BIG_TEST_CACHE

    if use_cache:
        get_big_test_cache_info(ibs, cfgx2_qreq_)
        try:
            cachetup = get_big_test_cache_info(ibs, cfgx2_qreq_)
            testres = ut.load_cache(*cachetup)
            testres.cfgdict_list = cfgdict_list
            testres.cfgx2_lbl = cfgx2_lbl  # hack override
        except IOError:
            pass
        else:
            if ut.NOT_QUIET:
                ut.colorprint('[harn] single testres cache hit... returning', 'turquoise')
            return testres

    if ibs.table_cache:
        # HACK
        prev_feat_cfgstr = None

    cfgx2_cfgresinfo = []
    #nPipeCfg = len(pipecfg_list)
    cfgiter = subindexer_partial(range(len(cfgx2_qreq_)),
                                 lbl='query config',
                                 freq=1, adjust=False,
                                 separate=True)
    # Run each pipeline configuration
    for cfgx in cfgiter:
        qreq_ = cfgx2_qreq_[cfgx]

        ut.colorprint('testnameid=%r' % (
            testnameid,), 'green')
        ut.colorprint('annot_cfgstr = %s' % (
            qreq_.get_cfgstr(with_input=True, with_pipe=False),), 'yellow')
        ut.colorprint('pipe_cfgstr= %s' % (
            qreq_.get_cfgstr(with_data=False),), 'turquoise')
        ut.colorprint('pipe_hashstr = %s' % (
            qreq_.get_pipe_hashid(),), 'teal')
        if DRY_RUN:
            continue

        indent_prefix = '[%s cfg %d/%d]' % (
            dbname,
            # cfgiter.count (doesn't work when quiet)
            (cfgiter.parent_index * cfgiter.nTotal) + cfgx,
            cfgiter.nTotal * cfgiter.parent_nTotal
        )

        with ut.Indenter(indent_prefix):
            # Run the test / read cache
            _need_compute = True
            if use_cache:
                # smaller cache for individual configuration runs
                st_cfgstr = qreq_.get_cfgstr(with_input=True)
                bt_cachedir = cachetup[0]
                st_cachedir = ut.unixjoin(bt_cachedir, 'small_tests')
                st_cachename = 'smalltest'
                ut.ensuredir(st_cachedir)
                try:
                    cfgres_info = ut.load_cache(st_cachedir, st_cachename, st_cfgstr)
                except IOError:
                    _need_compute = True
                else:
                    _need_compute = False
            if _need_compute:
                assert not ibs.table_cache
                if ibs.table_cache:
                    if (prev_feat_cfgstr is not None and
                            prev_feat_cfgstr != qreq_.qparams.feat_cfgstr):
                        # Clear features to preserve memory
                        ibs.clear_table_cache()
                        #qreq_.ibs.print_cachestats_str()
                cfgres_info = get_query_result_info(qreq_)
                # record previous feature configuration
                if ibs.table_cache:
                    prev_feat_cfgstr = qreq_.qparams.feat_cfgstr
                if use_cache:
                    ut.save_cache(st_cachedir, st_cachename, st_cfgstr, cfgres_info)
        if not NOMEMORY:
            # Store the results
            cfgx2_cfgresinfo.append(cfgres_info)
        else:
            cfgx2_qreq_[cfgx] = None
    if ut.NOT_QUIET:
        ut.colorprint('[harn] Completed running test configurations', 'white')
    if DRY_RUN:
        print('ran tests dryrun mode.')
        return
    if NOMEMORY:
        print('ran tests in memory savings mode. Cannot Print. exiting')
        return
    # Store all pipeline config results in a test result object
    testres = test_result.TestResult(pipecfg_list, cfgx2_lbl, cfgx2_cfgresinfo, cfgx2_qreq_)
    testres.testnameid = testnameid
    testres.lbl = lbl
    testres.cfgdict_list = cfgdict_list
    testres.aidcfg = None
    if use_cache:
        try:
            ut.save_cache(*tuple(list(cachetup) + [testres]))
        except Exception as ex:
            ut.printex(ex, 'error saving testres cache', iswarning=True)
            if ut.SUPER_STRICT:
                raise
    return testres