Example #1
def MAKE_BIG_DB():
    workdir = sysres.get_workdir()
    dbname = 'testdb_big'
    dbdir = join(workdir, dbname)
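    # Remove any previous copy of the test database so the build starts clean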
    utool.delete(dbdir)

    main_locals = ibeis.main(dbdir=dbdir, gui=False)
    ibs = main_locals['ibs']  # IBEIS Control

    imgdir = get_big_imgdir(workdir)
    gname_list = utool.list_images(imgdir, recursive=True)
    gpath_list = [join(imgdir, gname) for gname in gname_list]

    assert all(map(exists, gpath_list)), 'some images do not exist'

    #nImages = len(gpath_list)
    #with utool.Timer('Add %d Images' % nImages):
    gid_list = ibs.add_images(gpath_list)

    #with utool.Timer('Convert %d Images to annotations' % nImages):
    aid_list = ibsfuncs.use_images_as_annotations(ibs, gid_list)

    #with utool.Timer('Compute %d chips' % nImages):
    cid_list = ibs.add_chips(aid_list)

    #with utool.Timer('Compute %d features' % nImages):
    fid_list = ibs.add_feats(cid_list)

    #with utool.Timer('Getting %d nFeats' % nImages):
    nFeats_list = ibs.get_num_feats(fid_list)

    print('Total number of features in the database: %r' % sum(nFeats_list))
    return locals()
Example #2
def _localizer_lightnet_validate_training_kit(lightnet_training_kit_url):
    # Remove bad files
    delete_path_list = [
        join(lightnet_training_kit_url, '__MACOSX'),
    ]
    for delete_path in delete_path_list:
        if exists(delete_path):
            ut.delete(delete_path)

    # Ensure first-level structure
    bin_path = join(lightnet_training_kit_url, 'bin')
    cfg_path = join(lightnet_training_kit_url, 'cfg')
    data_path = join(lightnet_training_kit_url, 'data')
    weights_path = join(lightnet_training_kit_url, 'darknet19_448.conv.23.pt')
    assert exists(bin_path)
    assert exists(cfg_path)
    assert exists(data_path)
    assert exists(weights_path)

    # Ensure second-level structure
    dataset_py_path = join(bin_path, 'dataset.template.py')
    labels_py_path = join(bin_path, 'labels.template.py')
    test_py_path = join(bin_path, 'test.template.py')
    train_py_path = join(bin_path, 'train.template.py')
    config_py_path = join(cfg_path, 'yolo.template.py')
    assert exists(dataset_py_path)
    assert exists(labels_py_path)
    assert exists(test_py_path)
    assert exists(train_py_path)
    assert exists(config_py_path)
Example #3
def labeler_train(ibs,
                  species_list=None,
                  species_mapping=None,
                  viewpoint_mapping=None,
                  ensembles=3,
                  **kwargs):
    from ibeis_cnn.ingest_ibeis import get_cnn_labeler_training_images_pytorch
    from ibeis.algo.detect import densenet

    assert species_list is not None, 'labeler_train requires a species_list'
    species = '-'.join(species_list)
    args = (species, )
    data_path = join(ibs.get_cachedir(), 'extracted-labeler-%s' % args)
    extracted_path = get_cnn_labeler_training_images_pytorch(
        ibs,
        category_list=species_list,
        category_mapping=species_mapping,
        viewpoint_mapping=viewpoint_mapping,
        dest_path=data_path,
        **kwargs)

    weights_path_list = []
    for ensemble_num in range(ensembles):
        args = (
            species,
            ensemble_num,
        )
        output_path = join(ibs.get_cachedir(), 'training',
                           'labeler-%s-ensemble-%d' % args)
        if exists(output_path):
            ut.delete(output_path)
        weights_path = densenet.train(extracted_path,
                                      output_path,
                                      blur=False,
                                      flip=False)
        weights_path_list.append(weights_path)

    args = (species, )
    output_name = 'labeler.%s' % args
    ensemble_path = join(ibs.get_cachedir(), 'training', output_name)
    ut.ensuredir(ensemble_path)

    archive_path = '%s.zip' % (ensemble_path)
    ensemble_weights_path_list = []

    for index, weights_path in enumerate(sorted(weights_path_list)):
        assert exists(weights_path)
        ensemble_weights_path = join(ensemble_path,
                                     'labeler.%d.weights' % (index, ))
        ut.copy(weights_path, ensemble_weights_path)
        ensemble_weights_path_list.append(ensemble_weights_path)

    ensemble_weights_path_list = [ensemble_path] + ensemble_weights_path_list
    ut.archive_files(archive_path,
                     ensemble_weights_path_list,
                     overwrite=True,
                     common_prefix=True)

    return archive_path
Example #4
def download_model(algo, model_dir):
    zip_fpath = realpath(join(DETECTMODELS_DIR, algo + '.zip'))
    # Download and unzip model
    print('[grabmodels] Downloading model_dir=%s' % zip_fpath)
    dropbox_link = MODEL_URLS[algo]
    utool.download_url(dropbox_link, zip_fpath)
    utool.unzip_file(zip_fpath)
    # Cleanup
    utool.delete(zip_fpath)
Example #5
def canonical_classifier_train(ibs,
                               species,
                               ensembles=3,
                               extracted_path=None,
                               **kwargs):
    from wbia.other.detectexport import (
        get_cnn_classifier_canonical_training_images_pytorch, )
    from wbia.algo.detect import densenet

    args = (species, )
    data_path = join(ibs.get_cachedir(),
                     'extracted-classifier-canonical-%s' % args)
    if extracted_path is None:
        extracted_path = get_cnn_classifier_canonical_training_images_pytorch(
            ibs,
            species,
            dest_path=data_path,
        )

    weights_path_list = []
    for ensemble_num in range(ensembles):
        args = (
            species,
            ensemble_num,
        )
        output_path = join(ibs.get_cachedir(), 'training',
                           'classifier-canonical-%s-ensemble-%d' % args)
        if exists(output_path):
            ut.delete(output_path)
        weights_path = densenet.train(extracted_path,
                                      output_path,
                                      blur=False,
                                      flip=False)
        weights_path_list.append(weights_path)

    args = (species, )
    output_name = 'classifier.canonical.%s' % args
    ensemble_path = join(ibs.get_cachedir(), 'training', output_name)
    ut.ensuredir(ensemble_path)

    archive_path = '%s.zip' % (ensemble_path)
    ensemble_weights_path_list = []

    for index, weights_path in enumerate(sorted(weights_path_list)):
        assert exists(weights_path)
        ensemble_weights_path = join(
            ensemble_path, 'classifier.canonical.%d.weights' % (index, ))
        ut.copy(weights_path, ensemble_weights_path)
        ensemble_weights_path_list.append(ensemble_weights_path)

    ensemble_weights_path_list = [ensemble_path] + ensemble_weights_path_list
    ut.archive_files(archive_path,
                     ensemble_weights_path_list,
                     overwrite=True,
                     common_prefix=True)

    return archive_path
Example #6
def ensure_symlinked(dataset):
    """
    Creates a symlink to the training path in the training junction
    """
    junction_dpath = get_juction_dpath()
    dataset_dname = basename(dataset.dataset_dpath)
    dataset_dlink = join(junction_dpath, dataset_dname)
    if exists(dataset_dlink):
        ut.delete(dataset_dlink)
    ut.symlink(dataset.dataset_dpath, dataset_dlink)
Example #7
def _download_model(algo, algo_modeldir):
    """
    Download and overwrites models
    """
    zip_fpath = realpath(join(algo_modeldir, algo + '.zip'))
    # Download and unzip model
    logger.info('[grabmodels] Downloading model_dir=%s' % zip_fpath)
    model_link = MODEL_URLS[algo]
    ut.download_url(model_link, zip_fpath)
    ut.unzip_file(zip_fpath)
    # Cleanup
    ut.delete(zip_fpath)
Example #8
def _download_model(algo, algo_modeldir):
    """
    Download and overwrites models
    """
    zip_fpath = realpath(join(algo_modeldir, algo + '.zip'))
    # Download and unzip model
    print('[grabmodels] Downloading model_dir=%s' % zip_fpath)
    model_link = MODEL_URLS[algo]
    ut.download_url(model_link, zip_fpath)
    ut.unzip_file(zip_fpath)
    # Cleanup
    ut.delete(zip_fpath)
Example #9
def log_detections(ibs, aid_list, fallback=True):
    import time
    import os

    json_log_path = ibs.get_logdir_local()
    json_log_filename = 'detections.json'
    json_log_filepath = os.path.join(json_log_path, json_log_filename)
    logger.info('Logging detections added to: %r' % (json_log_filepath,))

    try:
        # Log has never been made, create one
        if not os.path.exists(json_log_filepath):
            json_dict = {
                'updates': [],
            }
            json_str = ut.to_json(json_dict, pretty=True)
            with open(json_log_filepath, 'w') as json_log_file:
                json_log_file.write(json_str)
        # Get current log state
        with open(json_log_filepath, 'r') as json_log_file:
            json_str = json_log_file.read()
        json_dict = ut.from_json(json_str)
        # Get values
        db_name = ibs.get_db_name()
        db_init_uuid = ibs.get_db_init_uuid()
        # Zip all the updates together and write to updates list in dictionary
        gid_list = ibs.get_annot_gids(aid_list)
        bbox_list = ibs.get_annot_bboxes(aid_list)
        theta_list = ibs.get_annot_thetas(aid_list)
        zipped = list(zip(aid_list, gid_list, bbox_list, theta_list))
        for aid, gid, bbox, theta in zipped:
            json_dict['updates'].append(
                {
                    'time_unixtime': time.time(),
                    'db_name': db_name,
                    'db_init_uuid': db_init_uuid,
                    'image_rowid': gid,
                    'annot_rowid': aid,
                    'annot_bbox': bbox,
                    'annot_theta': theta,
                }
            )
        # Write new log state
        json_str = ut.to_json(json_dict, pretty=True)
        with open(json_log_filepath, 'w') as json_log_file:
            json_log_file.write(json_str)
    except Exception:
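        # A corrupt log file is deleted and rebuilt by retrying once with fallback disabled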
        if fallback:
            logger.info('WRITE DETECTIONS.JSON FAILED - ATTEMPTING FALLBACK')
            ut.delete(json_log_filepath)
            ibs.log_detections(aid_list, fallback=False)
        else:
            logger.info('WRITE DETECTIONS.JSON FAILED - FALLBACK FAILED')
Example #10
def revert_to_backup(ibs):
    r"""
    Args:
        ibs (IBEISController): controller for the database to revert

    CommandLine:
        python -m wbia.control._sql_helpers --exec-revert_to_backup

    Example:
        >>> # SCRIPT
        >>> from wbia.control._sql_helpers import *  # NOQA
        >>> import wbia
        >>> ibs = wbia.opendb(defaultdb='elephants')
        >>> result = revert_to_backup(ibs)
        >>> print(result)
    """
    db_path = ibs.get_db_core_path()
    staging_path = ibs.get_db_staging_path()

    ibs.disconnect_sqldatabase()
    backup_dir = ibs.backupdir

    # Core database
    fname, ext = splitext(db_path)
    db_path_ = '%s_revert.sqlite3' % (fname, )
    ut.move(db_path, db_path_)
    fpath, fname = split(fname)
    path_list = sorted(ut.glob(backup_dir, '%s_*%s' % (
        fname,
        ext,
    )))
    assert len(path_list) > 0
    previous_backup = path_list[-1]
    copy_database(previous_backup, db_path)

    # Staging database
    fname, ext = splitext(staging_path)
    staging_path_ = '%s_revert.sqlite3' % (fname, )
    ut.move(staging_path, staging_path_)
    fpath, fname = split(fname)
    path_list = sorted(ut.glob(backup_dir, '%s_*%s' % (
        fname,
        ext,
    )))
    assert len(path_list) > 0
    previous_backup = path_list[-1]
    copy_database(previous_backup, staging_path)

    # Delete the cache
    ut.delete(ibs.cachedir)
Example #11
def reset_local_wildbook():
    r"""
    CommandLine:
        python -m ibeis.control.manual_wildbook_funcs --test-reset_local_wildbook

    Example:
        >>> # SCRIPT
        >>> from ibeis.control.manual_wildbook_funcs import *  # NOQA
        >>> reset_local_wildbook()
    """
    import utool as ut
    try:
        shutdown_wildbook_server()
    except ImportError:
        pass
    ut.delete(ut.unixjoin(ut.get_app_resource_dir('ibeis'), 'tomcat'))
Example #12
def test_body2(count, logmode, backspace):
    ut.colorprint('\n---- count = %r -----' % (count,), 'yellow')
    ut.colorprint('backspace = %r' % (backspace,), 'yellow')
    ut.colorprint('logmode = %r' % (logmode,), 'yellow')
    if logmode:
        ut.delete('test.log')
        ut.start_logging('test.log')
    print('Start main loop')
    import time
    for count in ut.ProgressIter(range(2), freq=1, backspace=backspace):
        for count in ut.ProgressIter(range(50), freq=1, backspace=backspace):
            time.sleep(.01)
    print('Done with main loop work')
    print('Exiting main body')
    if logmode:
        ut.stop_logging()
Example #13
def image_upload(cleanup=True, **kwargs):
    r"""
    Returns the gid for an uploaded image.

    Args:
        image (image binary): the POST variable containing the binary
            (multi-form) image data
        **kwargs: Arbitrary keyword arguments; the kwargs are passed down to
            the add_images function

    Returns:
        gid (rowid): gid corresponding to the uploaded image

    RESTful:
        Method: POST
        URL:    /api/upload/image/
    """
    ibs = current_app.ibs
    logger.info('request.files = %s' % (request.files, ))

    filestore = request.files.get('image', None)
    if filestore is None:
        raise controller_inject.WebMissingInput(
            'Missing required image parameter', 'image')
        # raise IOError('Image not given')

    uploads_path = ibs.get_uploadsdir()
    ut.ensuredir(uploads_path)
    current_time = time.strftime('%Y_%m_%d_%H_%M_%S')

    # Find a filename that does not collide with an existing upload
    modifier = 1
    upload_filename = 'upload_%s.png' % (current_time, )
    upload_filepath = join(uploads_path, upload_filename)
    while exists(upload_filepath):
        upload_filename = 'upload_%s_%04d.png' % (current_time, modifier)
        upload_filepath = join(uploads_path, upload_filename)
        modifier += 1
    filestore.save(upload_filepath)

    gid_list = ibs.add_images([upload_filepath], **kwargs)
    gid = gid_list[0]

    if cleanup and exists(upload_filepath):
        ut.delete(upload_filepath)

    return gid
Example #14
def image_src_api(rowid=None, thumbnail=False, fresh=False, **kwargs):
    r"""
    Returns the image file of image <gid>

    Example:
        >>> # xdoctest: +REQUIRES(--web-tests)
        >>> from wbia.web.app import *  # NOQA
        >>> import wbia
        >>> with wbia.opendb_bg_web('testdb1', start_job_queue=False, managed=True) as web_ibs:
        ...     resp = web_ibs.send_wbia_request('/api/image/src/1/', type_='get', json=False)
        >>> print(resp)

    RESTful:
        Method: GET
        URL:    /api/image/src/<rowid>/
    """
    from PIL import Image  # NOQA

    thumbnail = thumbnail or 'thumbnail' in request.args or 'thumbnail' in request.form
    ibs = current_app.ibs
    if thumbnail:
        gpath = ibs.get_image_thumbpath(rowid, ensure_paths=True)
        fresh = fresh or 'fresh' in request.args or 'fresh' in request.form
        if fresh:
            # import os
            # os.remove(gpath)
            ut.delete(gpath)
            gpath = ibs.get_image_thumbpath(rowid, ensure_paths=True)
    else:
        gpath = ibs.get_image_paths(rowid)

    # Load image
    assert gpath is not None, 'image path should not be None'
    image = vt.imread(gpath, orient='auto')
    image = appf.resize_via_web_parameters(image)
    image = image[:, :, ::-1]

    # Encode image
    image_pil = Image.fromarray(image)
    if six.PY2:
        img_io = StringIO()
    else:
        img_io = BytesIO()
    image_pil.save(img_io, 'JPEG', quality=100)
    img_io.seek(0)
    return send_file(img_io, mimetype='image/jpeg')
Example #15
def image_src_api(rowid=None, thumbnail=False, fresh=False, **kwargs):
    r"""
    Returns the image file of image <gid>

    Example:
        >>> from wbia.web.app import *  # NOQA
        >>> import wbia
        >>> with wbia.opendb_with_web('testdb1') as (ibs, client):
        ...     resp = client.get('/api/image/src/1/')
        >>> print(resp.data)
        b'\xff\xd8\xff\xe0\x00\x10JFIF...

    RESTful:
        Method: GET
        URL:    /api/image/src/<rowid>/
    """
    from PIL import Image  # NOQA

    thumbnail = thumbnail or 'thumbnail' in request.args or 'thumbnail' in request.form
    ibs = current_app.ibs
    if thumbnail:
        gpath = ibs.get_image_thumbpath(rowid, ensure_paths=True)
        fresh = fresh or 'fresh' in request.args or 'fresh' in request.form
        if fresh:
            # import os
            # os.remove(gpath)
            ut.delete(gpath)
            gpath = ibs.get_image_thumbpath(rowid, ensure_paths=True)
    else:
        gpath = ibs.get_image_paths(rowid)

    # Load image
    assert gpath is not None, 'image path should not be None'
    image = vt.imread(gpath, orient='auto')
    image = appf.resize_via_web_parameters(image)
    image = image[:, :, ::-1]

    # Encode image
    image_pil = Image.fromarray(image)
    if six.PY2:
        img_io = StringIO()
    else:
        img_io = BytesIO()
    image_pil.save(img_io, 'JPEG', quality=100)
    img_io.seek(0)
    return send_file(img_io, mimetype='image/jpeg')
Example #16
def clear_test_img_cache():
    r"""
    CommandLine:
        python -m utool.util_grabdata --test-clear_test_img_cache

    Example:
        >>> # UNSTABLE_DOCTEST
        >>> from utool.util_grabdata import *  # NOQA
        >>> testimg_fpath = clear_test_img_cache()
        >>> result = str(testimg_fpath)
        >>> print(result)
    """
    import utool as ut
    download_dir = util_cplat.get_app_resource_dir('utool')
    for key in TESTIMG_URL_DICT:
        fpath = join(download_dir, key)
        ut.delete(fpath)
Example #17
def collector_loop(port_dict, dbdir, containerized):
    """
    Service that stores completed algorithm results
    """
    import ibeis
    update_proctitle('collector_loop')
    print = partial(ut.colorprint, color='yellow')
    with ut.Indenter('[collect] '):
        collect_rout_sock = ctx.socket(zmq.ROUTER)
        collect_rout_sock.setsockopt_string(zmq.IDENTITY, 'collect.ROUTER')
        collect_rout_sock.connect(port_dict['collect_url2'])
        if VERBOSE_JOBS:
            print('connect collect_url2  = %r' % (port_dict['collect_url2'], ))

        ibs = ibeis.opendb(dbdir=dbdir, use_cache=False, web=False)
        # shelve_path = join(ut.get_shelves_dir(appname='ibeis'), 'engine')
        shelve_path = ibs.get_shelves_path()
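        # Reset the shelve cache: drop any stale results, then recreate the directory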
        ut.delete(shelve_path)
        ut.ensuredir(shelve_path)

        collecter_data = {}
        awaiting_data = {}
        try:
            while True:
                # several callers here
                # CALLER: collector_notify
                # CALLER: collector_store
                # CALLER: collector_request_status
                # CALLER: collector_request_result
                idents, collect_request = rcv_multipart_json(collect_rout_sock,
                                                             print=print)
                try:
                    reply = on_collect_request(collect_request,
                                               collecter_data,
                                               awaiting_data,
                                               shelve_path,
                                               containerized=containerized)
                except Exception as ex:
                    print(ut.repr3(collect_request))
                    ut.printex(ex, 'ERROR in collection')
                    # Always send a reply so the router does not hang on error
                    reply = {'status': 'error', 'text': 'ERROR in collection'}
                send_multipart_json(collect_rout_sock, idents, reply)
        except KeyboardInterrupt:
            print('Caught ctrl+c in collector loop. Gracefully exiting')
        if VERBOSE_JOBS:
            print('Exiting collector')
Example #18
def test_body(count, logmode, backspace):
    ut.colorprint('\n---- count = %r -----' % (count,), 'yellow')
    ut.colorprint('backspace = %r' % (backspace,), 'yellow')
    ut.colorprint('logmode = %r' % (logmode,), 'yellow')
    if logmode:
        ut.delete('test.log')
        ut.start_logging('test.log')
    print('Start main loop')
    import time
    for count in ut.ProgressIter(range(20), freq=3, backspace=backspace):
        time.sleep(.01)
    print('Done with main loop work')
    print('Exiting main body')
    if logmode:
        ut.stop_logging()
        #print('-----DONE LOGGING----')
        testlog_text = ut.readfrom('test.log')
        print(ut.indent(testlog_text.replace('\r', '\n'), '        '))
Example #19
def clean_pyinstaller():
    print('[installer] clean_pyinstaller()')
    cwd = get_setup_dpath()
    utool.remove_files_in_dir(cwd, 'IBEISApp.pkg', recursive=False)
    utool.remove_files_in_dir(cwd, 'qt_menu.nib', recursive=False)
    utool.delete(join(cwd, 'dist'))
    utool.delete(join(cwd, 'build'))
    utool.delete(join(cwd, 'pyrf'))
    utool.delete(join(cwd, 'pyhesaff'))
    print('[installer] finished clean_pyinstaller()')
Example #20
def clear_uuid_cache(qreq_):
    """
    CommandLine:
        python -m ibeis.algo.hots.neighbor_index_cache --test-clear_uuid_cache

    Example:
        >>> # DISABLE_DOCTEST
        >>> from ibeis.algo.hots.neighbor_index_cache import *  # NOQA
        >>> import ibeis
        >>> qreq_ = ibeis.testdata_qreq_(defaultdb='testdb1', p='default:fg_on=True')
        >>> fgws_list = clear_uuid_cache(qreq_)
        >>> result = str(fgws_list)
        >>> print(result)
    """
    print('[nnindex] clearing uuid cache')
    uuid_map_fpath = get_nnindexer_uuid_map_fpath(qreq_)
    ut.delete(uuid_map_fpath)
    ut.delete(uuid_map_fpath + '.lock')
    print('[nnindex] finished uuid cache clear')
Example #21
File: apis.py  Project: whaozl/ibeis
def image_src_api(gid=None, thumbnail=False, fresh=False, **kwargs):
    r"""
    Returns the image file of image <gid>

    Example:
        >>> # WEB_DOCTEST
        >>> from ibeis.web.app import *  # NOQA
        >>> import ibeis
        >>> web_ibs = ibeis.opendb_bg_web('testdb1', start_job_queue=False)
        >>> resp = web_ibs.send_ibeis_request('/api/image/src/', type_='get', gid=1)
        >>> print(resp)
        >>> web_ibs.terminate2()

    RESTful:
        Method: GET
        URL:    /api/image/src/<gid>/
    """
    from PIL import Image  # NOQA
    thumbnail = thumbnail or 'thumbnail' in request.args or 'thumbnail' in request.form
    ibs = current_app.ibs
    if thumbnail:
        gpath = ibs.get_image_thumbpath(gid, ensure_paths=True)
        fresh = fresh or 'fresh' in request.args or 'fresh' in request.form
        if fresh:
            #import os
            #os.remove(gpath)
            ut.delete(gpath)
            gpath = ibs.get_image_thumbpath(gid, ensure_paths=True)
    else:
        gpath = ibs.get_image_paths(gid)

    # Load image
    assert gpath is not None, 'image path should not be None'
    image = vt.imread(gpath, orient='auto')
    image = appf.resize_via_web_parameters(image)
    image = image[:, :, ::-1]

    # Encode image
    image_pil = Image.fromarray(image)
    img_io = StringIO.StringIO()
    image_pil.save(img_io, 'JPEG', quality=100)
    img_io.seek(0)
    return send_file(img_io, mimetype='image/jpeg')
Example #22
def get_nth_test_schema_version(schema_spec, n=-1):
    """
    Gets a fresh and empty test version of a schema

    Args:
        schema_spec (module): schema module to get nth version of
        n (int): version index (-1 is the latest)
    """
    from dtool.sql_control import SQLDatabaseController
    dbname = schema_spec.__name__
    print('[_SQL] getting n=%r-th version of %r' % (n, dbname))
    version_expected = list(schema_spec.VALID_VERSIONS.keys())[n]
    cachedir = ut.ensure_app_resource_dir('ibeis_test')
    db_fname = 'test_%s.sqlite3' % dbname
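    # Remove any stale copy so the schema test starts from an empty database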
    ut.delete(join(cachedir, db_fname))
    db = SQLDatabaseController(cachedir, db_fname, text_factory=unicode)
    ensure_correct_version(
        None, db, version_expected, schema_spec, dobackup=False)
    return db
Example #23
def redownload_models(modeldir='default', verbose=True):
    r"""
    Args:
        modeldir (str): (default = 'default')
        verbose (bool): verbosity flag (default = True)

    CommandLine:
        python -m ibeis.algo.detect.grabmodels --test-redownload_models

    Example:
        >>> # SCRIPT
        >>> from ibeis.algo.detect.grabmodels import *  # NOQA
        >>> result = redownload_models()
    """
    print('[grabmodels] redownload_detection_models')
    modeldir = _expand_modeldir(modeldir)
    ut.delete(modeldir)
    ensure_models(modeldir=modeldir, verbose=verbose)
    if verbose:
        print('[grabmodels] finished redownload_detection_models')
Example #24
def redownload_models(modeldir='default', verbose=True):
    r"""
    Args:
        modeldir (str): (default = 'default')
        verbose (bool): verbosity flag (default = True)

    CommandLine:
        python -m wbia.algo.detect.grabmodels --test-redownload_models

    Example:
        >>> # SCRIPT
        >>> from wbia.algo.detect.grabmodels import *  # NOQA
        >>> result = redownload_models()
    """
    logger.info('[grabmodels] redownload_detection_models')
    modeldir = _expand_modeldir(modeldir)
    ut.delete(modeldir)
    ensure_models(modeldir=modeldir, verbose=verbose)
    if verbose:
        logger.info('[grabmodels] finished redownload_detection_models')
Example #25
def collector_loop(dbdir):
    """
    Service that stores completed algorithm results
    """
    import ibeis
    update_proctitle('collector_loop')
    print = partial(ut.colorprint, color='yellow')
    with ut.Indenter('[collect] '):
        collect_rout_sock = ctx.socket(zmq.ROUTER)
        collect_rout_sock.setsockopt_string(zmq.IDENTITY, 'collect.ROUTER')
        collect_rout_sock.connect(collect_url2)
        if VERBOSE_JOBS:
            print('connect collect_url2  = %r' % (collect_url2,))

        ibs = ibeis.opendb(dbdir=dbdir, use_cache=False, web=False)
        # shelve_path = join(ut.get_shelves_dir(appname='ibeis'), 'engine')
        shelve_path = ibs.get_shelves_path()
        ut.delete(shelve_path)
        ut.ensuredir(shelve_path)

        collecter_data = {}
        awaiting_data = {}
        try:
            while True:
                # several callers here
                # CALLER: collector_notify
                # CALLER: collector_store
                # CALLER: collector_request_status
                # CALLER: collector_request_result
                idents, collect_request = rcv_multipart_json(collect_rout_sock, print=print)
                try:
                    reply = on_collect_request(collect_request, collecter_data,
                                               awaiting_data, shelve_path)
                except Exception as ex:
                    print(ut.repr3(collect_request))
                    ut.printex(ex, 'ERROR in collection')
                    # Always send a reply so the router does not hang on error
                    reply = {'status': 'error', 'text': 'ERROR in collection'}
                send_multipart_json(collect_rout_sock, idents, reply)
        except KeyboardInterrupt:
            print('Caught ctrl+c in collector loop. Gracefully exiting')
        if VERBOSE_JOBS:
            print('Exiting collector')
Example #26
def download_win_packages(href_list):
    pkg_exe_list = []
    #href = href_list[0]
    #pkg_exe = ut.util_grabdata.grab_file_url(href, delay=3, spoof=True)
    #pkg_exe_list += [pkg_exe]
    ## Execute download
    for href in href_list:
        # Download the file if you haven't already done so
        pkg_exe = ut.util_grabdata.grab_file_url(href, delay=3, spoof=True)
        # Check to make sure it worked
        nBytes = ut.get_file_nBytes(pkg_exe)
        if nBytes < 1000:
            print('There may be a problem with %r' % (pkg_exe,))
            print('nBytes = %r' % (nBytes,))
            RETRY_PROBLEMS = False
            if RETRY_PROBLEMS:
                # retry if file was probably corrupted
                ut.delete(pkg_exe)
                pkg_exe = ut.util_grabdata.grab_file_url(href, delay=3, spoof=True)
        pkg_exe_list += [pkg_exe]
    return pkg_exe_list
Example #27
def testdata_ensure_unconverted_hsdb():
    r"""
    Makes an unconverted test datapath

    CommandLine:
        python -m ibeis.dbio.ingest_hsdb --test-testdata_ensure_unconverted_hsdb

    Example:
        >>> # SCRIPT
        >>> from ibeis.dbio.ingest_hsdb import *  # NOQA
        >>> result = testdata_ensure_unconverted_hsdb()
        >>> print(result)
    """
    import utool as ut
    assert ut.is_developer(), 'dev function only'
    # Make an unconverted test database
    ut.ensurepath('/raid/tests/tmp')
    ut.delete('/raid/tests/tmp/Frogs')
    ut.copy('/raid/tests/Frogs', '/raid/tests/tmp/Frogs')
    hsdb_dir = '/raid/tests/tmp/Frogs'
    return hsdb_dir
Example #28
def purge_local_wildbook():
    r"""
    Shuts down the server and then purges the server on disk

    CommandLine:
        python -m ibeis purge_local_wildbook
        python -m ibeis purge_local_wildbook --purge-war

    Example:
        >>> # SCRIPT
        >>> from ibeis.control.wildbook_manager import *  # NOQA
        >>> purge_local_wildbook()
    """
    try:
        shutdown_wildbook_server()
    except ImportError:
        pass
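    # Remove the local Tomcat install that hosted the Wildbook server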
    ut.delete(ut.unixjoin(ut.get_app_resource_dir('ibeis'), 'tomcat'))
    if ut.get_argflag('--purge-war'):
        war_fpath = find_or_download_wilbook_warfile(ensure=False)
        ut.delete(war_fpath)
Example #29
def time_different_diskstores():
    """
    %timeit shelf_write_test()    # 15.1 ms per loop
    %timeit cPickle_write_test()  # 1.26 ms per loop

    %timeit shelf_read_test()     # 8.77 ms per loop
    %timeit cPickle_read_test()   # 2.4 ms per loop
    %timeit cPickle_read_test2()  # 2.35 ms per loop

    %timeit json_read_test()
    %timeit json_write_test()
    """
    import six
    import uuid
    import simplejson as json
    from six.moves import cPickle  # py2 cPickle; maps to pickle on py3
    import utool as ut
    shelf_path = 'test.shelf'
    json_path = 'test.json'
    cpkl_path = 'test.pkl'
    size = 1000
    dict_ = {str(key): str(uuid.uuid4()) for key in range(size)}
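    # Clear leftovers from previous runs so each storage test starts cold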
    ut.delete(cpkl_path)
    ut.delete(json_path)
    ut.delete(shelf_path)

    def shelf_write_test():
        with ut.shelf_open(shelf_path) as shelf_dict:
            shelf_dict.update(dict_)

    def shelf_read_test():
        with ut.shelf_open(shelf_path) as shelf_dict:
            test = {key: val for key, val in six.iteritems(shelf_dict)}
        assert len(test) > 0

    def json_write_test():
        with open(json_path, 'wb') as outfile:
            json.dump(dict_, outfile)

    def cPickle_write_test():
        with open(cpkl_path, 'wb') as outfile:
            cPickle.dump(dict_, outfile)

    def cPickle_read_test():
        with open(cpkl_path, 'rb') as outfile:
            test = {key: val for key, val in six.iteritems(cPickle.load(outfile))}
        assert len(test) > 0

    def cPickle_read_test2():
        with open(cpkl_path, 'rb') as outfile:
            test = cPickle.load(outfile)
        assert len(test) > 0

    shelf_write_test()
    shelf_read_test()
    #json_write_test()
    #json_read_test()
    cPickle_write_test()
    cPickle_read_test()
    cPickle_read_test2()
Example #30
def get_nth_test_schema_version(schema_spec, n=-1):
    """
    Gets a fresh and empty test version of a schema

    Args:
        schema_spec (module): schema module to get nth version of
        n (int): version index (-1 is the latest)
    """
    from wbia.dtool.sql_control import SQLDatabaseController

    dbname = schema_spec.__name__
    logger.info('[_SQL] getting n=%r-th version of %r' % (n, dbname))
    version_expected = list(schema_spec.VALID_VERSIONS.keys())[n]
    cachedir = ut.ensure_app_resource_dir('wbia_test')
    db_fname = 'test_%s.sqlite3' % dbname
    ut.delete(join(cachedir, db_fname))
    db_uri = 'sqlite:///{}'.format(realpath(join(cachedir, db_fname)))
    db = SQLDatabaseController(db_uri, dbname)
    ensure_correct_version(None,
                           db,
                           version_expected,
                           schema_spec,
                           dobackup=False)
    return db
Example #31
def read_thumb_size(thumb_path):
    import vtool as vt

    if VERBOSE_THUMB:
        print('[ThumbDelegate] Reading thumb size')
    # npimg = vt.imread(thumb_path, delete_if_corrupted=True)
    # (height, width) = npimg.shape[0:2]
    # del npimg
    try:
        width, height = vt.open_image_size(thumb_path)
    except IOError as ex:
        if ut.checkpath(thumb_path, verbose=True):
            ut.printex(
                ex,
                'image=%r seems corrupted. Needs deletion' % (thumb_path, ),
                iswarning=True,
            )
            ut.delete(thumb_path)
        else:
            ut.printex(ex,
                       'image=%r does not exist' % (thumb_path, ),
                       iswarning=True)
        raise
    return width, height
Example #32
def clean_pyinstaller():
    print('[installer] +--- CLEAN_PYINSTALLER ---')
    cwd = get_setup_dpath()
    ut.remove_files_in_dir(cwd, 'IBEISApp.pkg', recursive=False)
    ut.remove_files_in_dir(cwd, 'qt_menu.nib', recursive=False)
    ut.delete(join(cwd, 'dist/ibeis'))
    ut.delete(join(cwd, 'ibeis-win32-setup.exe'))
    ut.delete(join(cwd, 'build'))
    #ut.delete(join(cwd, 'pyrf'))
    #ut.delete(join(cwd, 'pyhesaff'))
    print('[installer] L___ FINISHED CLEAN_PYINSTALLER ___')
Example #33
def delete_global_prefs(back):
    print('[back] delete_global_prefs')
    # TODO: Add an "are you sure?" dialog
    utool.delete(utool.get_app_resource_dir('ibeis', 'global_cache'))
Example #34
def on_collect_request(collect_request, collecter_data, awaiting_data, shelve_path):
    """ Run whenever the collector recieves a message """
    import requests
    reply = {}
    action = collect_request['action']
    if VERBOSE_JOBS:
        print('...building action=%r response' % (action,))
    if action == 'notification':
        # From the Queue
        jobid = collect_request['jobid']
        awaiting_data[jobid] = collect_request['text']
        # Make waiting lock
        lock_filepath = join(shelve_path, '%s.lock' % (jobid, ))
        ut.touch(lock_filepath)
    elif action == 'store':
        # From the Engine
        engine_result = collect_request['engine_result']
        callback_url = collect_request['callback_url']
        callback_method = collect_request['callback_method']
        jobid = engine_result['jobid']

        # OLD METHOD
        # collecter_data[jobid] = engine_result
        collecter_data[jobid] = engine_result['exec_status']

        # NEW METHOD
        shelve_filepath = join(shelve_path, '%s.shelve' % (jobid, ))
        shelf = shelve.open(shelve_filepath, writeback=True)
        try:
            shelf[str('result')] = engine_result
        finally:
            shelf.close()

        # Delete the lock
        lock_filepath = join(shelve_path, '%s.lock' % (jobid, ))
        ut.delete(lock_filepath)

        if callback_url is not None:
            if callback_method is None:
                callback_method = 'post'
            else:
                callback_method = callback_method.lower()
            if VERBOSE_JOBS:
                print('calling callback_url using callback_method')
            try:
                # requests.get(callback_url)
                if callback_method == 'post':
                    requests.post(callback_url, data={'jobid': jobid})
                elif callback_method == 'get':
                    requests.get(callback_url, params={'jobid': jobid})
                elif callback_method == 'put':
                    requests.put(callback_url, data={'jobid': jobid})
                else:
                    raise ValueError('callback_method %r unsupported' %
                                     (callback_method, ))
            except Exception as ex:
                msg = (('ERROR in collector. '
                        'Tried to call callback_url=%r with callback_method=%r')
                       % (callback_url, callback_method, ))
                print(msg)
                ut.printex(ex, msg)
            #requests.post(callback_url)
        if VERBOSE_JOBS:
            print('stored result')
    elif action == 'job_status':
        # From a Client
        jobid = collect_request['jobid']
        if jobid in collecter_data:
            reply['jobstatus'] = 'completed'
            reply['exec_status'] = collecter_data[jobid]
        elif jobid in awaiting_data:
            reply['jobstatus'] = 'working'
        else:
            reply['jobstatus'] = 'unknown'
        reply['status'] = 'ok'
        reply['jobid'] = jobid
    elif action == 'job_result':
        # From a Client
        jobid = collect_request['jobid']
        try:
            # OLD METHOD
            # engine_result = collecter_data[jobid]
            # NEW METHOD
            shelve_filepath = join(shelve_path, '%s.shelve' % (jobid, ))
            shelf = shelve.open(shelve_filepath)
            try:
                engine_result = shelf[str('result')]
            finally:
                shelf.close()

            json_result = engine_result['json_result']
            reply['jobid'] = jobid
            reply['status'] = 'ok'
            # reply['json_result'] = json_result
            # We want to parse the JSON result here, since we need to live in
            # Python land for the rest of the call until the API wrapper
            # converts the Python objects to JSON before the response is
            # generated.  This prevents the API from converting a Python string
            # of JSON to a JSON string of JSON, which is bad.
            reply['json_result'] = ut.from_json(json_result)
        except KeyError:
            reply['jobid'] = jobid
            reply['status'] = 'invalid'
            reply['json_result'] = None
    else:
        # Other
        print('...error unknown action=%r' % (action,))
        reply['status'] = 'error'
        reply['text'] = 'unknown action'
    return reply
Example #35
def simple_thumbnail_widget():
    r"""
    Very simple example to test thumbnails

    CommandLine:
        python -m guitool.api_thumb_delegate --test-simple_thumbnail_widget  --show
        python -m guitool.api_thumb_delegate --test-simple_thumbnail_widget  --show --tb

    Example:
        >>> # GUI_DOCTEST
        >>> from guitool.api_thumb_delegate import *  # NOQA
        >>> import guitool
        >>> guitool.ensure_qapp()  # must be ensured before any embedding
        >>> wgt = simple_thumbnail_widget()
        >>> ut.quit_if_noshow()
        >>> wgt.show()
        >>> guitool.qtapp_loop(wgt, frequency=100)
    """
    import guitool
    guitool.ensure_qapp()
    col_name_list = ['rowid', 'image_name', 'thumb']
    col_types_dict = {
        'thumb': 'PIXMAP',
    }

    guitool_test_thumbdir = ut.ensure_app_resource_dir('guitool', 'thumbs')
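    # Clear and recreate the thumbnail cache so the test renders fresh thumbs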
    ut.delete(guitool_test_thumbdir)
    ut.ensuredir(guitool_test_thumbdir)
    import vtool as vt
    from os.path import join

    def thumb_getter(id_, thumbsize=128):
        """ Thumb getters must conform to thumbtup structure """
        #print(id_)
        if id_ == 'doesnotexist.jpg':
            return None
        else:
            img_path = ut.grab_test_imgpath(id_, verbose=False)
            img_size = vt.open_image_size(img_path)
        thumb_path = join(guitool_test_thumbdir, ut.hashstr(str(img_path)) + '.jpg')
        if id_ == 'carl.jpg':
            bbox_list = [(10, 10, 200, 200)]
            theta_list = [0]
        elif id_ == 'lena.png':
            #bbox_list = [(10, 10, 200, 200)]
            bbox_list = [None]
            theta_list = [None]
        else:
            bbox_list = []
            theta_list = []
        thumbtup = (thumb_path, img_path, img_size, bbox_list, theta_list)
        #print('thumbtup = %r' % (thumbtup,))
        return thumbtup
        #return None

    #imgname_list = sorted(ut.TESTIMG_URL_DICT.keys())
    imgname_list = ['carl.jpg', 'lena.png', 'patsy.jpg']

    imgname_list += ['doesnotexist.jpg']

    col_getter_dict = {
        'rowid': list(range(len(imgname_list))),
        'image_name': imgname_list,
        'thumb': thumb_getter
    }
    col_ider_dict = {
        'thumb': 'image_name',
    }
    col_setter_dict = {}
    editable_colnames = []
    sortby = 'rowid'
    get_thumb_size = lambda: 128  # NOQA
    col_width_dict = {}
    col_bgrole_dict = {}

    api = guitool.CustomAPI(
        col_name_list, col_types_dict, col_getter_dict,
        col_bgrole_dict, col_ider_dict, col_setter_dict,
        editable_colnames, sortby, get_thumb_size, True, col_width_dict)
    headers = api.make_headers(tblnice='Utool Test Images')

    wgt = guitool.APIItemWidget()
    wgt.change_headers(headers)
    wgt.resize(600, 400)
    #guitool.qtapp_loop(qwin=wgt, ipy=ipy, frequency=loop_freq)
    return wgt
Example #36
    def _train_setup(dark, voc_path, weight_path):

        class_list = []
        annotations_path = join(voc_path, "Annotations")
        imagesets_path = join(voc_path, "ImageSets")
        jpegimages_path = join(voc_path, "JPEGImages")
        label_path = join(voc_path, "labels")

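        # Regenerate the YOLO label directory from scratch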
        ut.delete(label_path)
        ut.ensuredir(label_path)

        def _convert_annotation(image_id):
            import xml.etree.ElementTree as ET

            def _convert(size, box):
                dw = 1.0 / size[0]
                dh = 1.0 / size[1]
                x = (box[0] + box[1]) / 2.0
                y = (box[2] + box[3]) / 2.0
                w = box[1] - box[0]
                h = box[3] - box[2]
                x = x * dw
                w = w * dw
                y = y * dh
                h = h * dh
                return (x, y, w, h)

            with open(join(label_path, "%s.txt" % (image_id,)), "w") as out_file:
                with open(join(annotations_path, "%s.xml" % (image_id,)), "r") as in_file:
                    tree = ET.parse(in_file)
                    root = tree.getroot()
                    size = root.find("size")
                    w = int(size.find("width").text)
                    h = int(size.find("height").text)

                    for obj in root.iter("object"):
                        if int(obj.find("difficult").text) == 1:
                            continue
                        class_ = obj.find("name").text
                        if class_ not in class_list:
                            class_list.append(class_)
                        class_id = class_list.index(class_)
                        xmlbox = obj.find("bndbox")
                        b = tuple(
                            map(
                                float,
                                [
                                    xmlbox.find("xmin").text,
                                    xmlbox.find("xmax").text,
                                    xmlbox.find("ymin").text,
                                    xmlbox.find("ymax").text,
                                ],
                            )
                        )
                        bb = _convert((w, h), b)
                        bb_str = " ".join([str(_) for _ in bb])
                        out_file.write("%s %s\n" % (class_id, bb_str))

        num_images = 0
        print("[pydarknet py train] Processing manifest...")
        manifest_filename = join(voc_path, "manifest.txt")
        with open(manifest_filename, "w") as manifest:
            # for dataset_name in ['train', 'val', 'test']:
            for dataset_name in ["train", "val"]:
                dataset_filename = join(imagesets_path, "Main", "%s.txt" % dataset_name)
                with open(dataset_filename, "r") as dataset:
                    image_id_list = dataset.read().strip().split()

                for image_id in image_id_list:
                    print("[pydarknet py train]     processing: %r" % (image_id,))
                    image_filepath = abspath(join(jpegimages_path, "%s.jpg" % image_id))
                    if exists(image_filepath):
                        manifest.write("%s\n" % (image_filepath,))
                        _convert_annotation(image_id)
                        num_images += 1

        print("[pydarknet py train] Processing config and pretrained weights...")
        # Load default config and pretrained weights
        config_filepath = ut.grab_file_url(DEFAULT_CONFIG_TEMPLATE_URL, appname="pydarknet")
        with open(config_filepath, "r") as config:
            config_template_str = config.read()

        config_filename = basename(config_filepath).replace(".template.", ".%d." % (len(class_list),))
        config_filepath = join(weight_path, config_filename)
        with open(config_filepath, "w") as config:
            replace_list = [
                ("_^_OUTPUT_^_", SIDES * SIDES * (BOXES * 5 + len(class_list))),
                ("_^_CLASSES_^_", len(class_list)),
                ("_^_SIDES_^_", SIDES),
                ("_^_BOXES_^_", BOXES),
            ]
            for needle, replacement in replace_list:
                config_template_str = config_template_str.replace(needle, str(replacement))
            config.write(config_template_str)

        class_filepath = "%s.classes" % (config_filepath,)
        with open(class_filepath, "w") as class_file:
            for class_ in class_list:
                class_file.write("%s\n" % (class_,))

        weight_filepath = ut.grab_file_url(DEFAULT_PRETRAINED_URL, appname="pydarknet")
        dark._load(config_filepath, weight_filepath)

        print("class_list = %r" % (class_list,))
        print("num_images = %r" % (num_images,))

        return manifest_filename, num_images, config_filepath, class_filepath
Example #37
def close(temp):
    temp._check_open()
    if temp.delete and exists(temp.fpath):
        ut.delete(temp.fpath, verbose=temp.verbose)
    temp._isclosed = True
Example #38
def get_cnn_classifier_cameratrap_binary_training_images_pytorch(
    ibs,
    positive_imageset_id,
    negative_imageset_id,
    dest_path=None,
    valid_rate=0.2,
    image_size=224,
    purge=True,
    skip_rate=0.0,
    skip_rate_pos=0.0,
    skip_rate_neg=0.0,
):
    from os.path import join, expanduser
    import random
    import cv2

    if dest_path is None:
        dest_path = expanduser(join('~', 'Desktop', 'extracted'))

    name = 'classifier-cameratrap-pytorch'
    dbname = ibs.dbname
    name_path = join(dest_path, name)
    train_path = join(name_path, 'train')
    valid_path = join(name_path, 'val')

    train_pos_path = join(train_path, 'positive')
    train_neg_path = join(train_path, 'negative')
    valid_pos_path = join(valid_path, 'positive')
    valid_neg_path = join(valid_path, 'negative')

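    # Optionally purge previously extracted data before rebuilding the directory tree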
    if purge:
        ut.delete(name_path)

    ut.ensuredir(name_path)
    ut.ensuredir(train_path)
    ut.ensuredir(valid_path)

    ut.ensuredir(train_pos_path)
    ut.ensuredir(train_neg_path)
    ut.ensuredir(valid_pos_path)
    ut.ensuredir(valid_neg_path)

    train_gid_set = set(
        ibs.get_imageset_gids(
            ibs.get_imageset_imgsetids_from_text('TRAIN_SET')))

    positive_gid_set = set(ibs.get_imageset_gids(positive_imageset_id))
    negative_gid_set = set(ibs.get_imageset_gids(negative_imageset_id))

    candidate_gid_set = positive_gid_set | negative_gid_set
    candidate_gid_set = train_gid_set & candidate_gid_set

    for gid in candidate_gid_set:
        # args = (gid, )
        # logger.info('Processing GID: %r' % args)

        if skip_rate > 0.0 and random.uniform(0.0, 1.0) <= skip_rate:
            logger.info('\t Skipping - Sampling')
            continue

        if gid in positive_gid_set:
            category = 'positive'
        elif gid in negative_gid_set:
            category = 'negative'
        else:
            logger.info('\t Skipping - No Label')
            continue

        if (skip_rate_pos > 0.0 and category == 'positive'
                and random.uniform(0.0, 1.0) <= skip_rate_pos):
            logger.info('\t Skipping Positive')
            continue

        if (skip_rate_neg > 0.0 and category == 'negative'
                and random.uniform(0.0, 1.0) <= skip_rate_neg):
            logger.info('\t Skipping Negative')
            continue

        is_valid = random.uniform(0.0, 1.0) < valid_rate

        if category == 'positive':
            dest_path = valid_pos_path if is_valid else train_pos_path
        elif category == 'negative':
            dest_path = valid_neg_path if is_valid else train_neg_path
        else:
            raise ValueError()

        image = ibs.get_images(gid)
        image_ = cv2.resize(image, (image_size, image_size),
                            interpolation=cv2.INTER_LANCZOS4)

        values = (
            dbname,
            gid,
        )
        patch_filename = '%s_image_gid_%s.png' % values
        patch_filepath = join(dest_path, patch_filename)
        cv2.imwrite(patch_filepath, image_)

    return name_path
Example #39
def get_cnn_labeler_training_images_pytorch(
    ibs,
    dest_path=None,
    image_size=224,
    category_list=None,
    min_examples=10,
    category_mapping=None,
    viewpoint_mapping=None,
    purge=True,
    strict=True,
    skip_rate=0.0,
    valid_rate=0.2,
    use_axis_aligned_chips=False,
    train_gid_set=None,
):
    from os.path import join, expanduser, exists
    import random
    import cv2

    if dest_path is None:
        dest_path = expanduser(join('~', 'Desktop', 'extracted'))

    name = 'labeler-pytorch'
    dbname = ibs.dbname
    name_path = join(dest_path, name)
    train_path = join(name_path, 'train')
    valid_path = join(name_path, 'val')

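    # Optionally purge previously extracted data before regenerating it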
    if purge:
        ut.delete(name_path)

    ut.ensuredir(name_path)
    ut.ensuredir(train_path)
    ut.ensuredir(valid_path)

    logger.info('category mapping = %s' % (ut.repr3(category_mapping), ))
    logger.info('viewpoint mapping = %s' % (ut.repr3(viewpoint_mapping), ))

    # train_gid_set = ibs.get_valid_gids()
    if train_gid_set is None:
        train_gid_set = set(
            ibs.get_imageset_gids(
                ibs.get_imageset_imgsetids_from_text('TRAIN_SET')))

    aids_list = ibs.get_image_aids(train_gid_set)
    # bboxes_list = [ ibs.get_annot_bboxes(aid_list) for aid_list in aids_list ]
    # aid_list = ibs.get_valid_aids()
    aid_list = ut.flatten(aids_list)
    # import random
    # random.shuffle(aid_list)
    # aid_list = sorted(aid_list[:100])
    species_list = ibs.get_annot_species_texts(aid_list)
    if category_mapping is not None:
        species_list = [
            category_mapping.get(species, species) for species in species_list
        ]
    species_set = set(species_list)
    yaw_list = ibs.get_annot_viewpoints(aid_list)

    if category_list is None:
        category_list = sorted(list(species_set))
        undesired_list = [
            'unspecified_animal',
            ibs.get_species_nice(ibs.const.UNKNOWN_SPECIES_ROWID),
        ]
        for undesired_species in undesired_list:
            if undesired_species in category_list:
                category_list.remove(undesired_species)
    category_set = set(category_list)

    # Filter the tup_list based on the requested categories
    tup_list = list(zip(aid_list, species_list, yaw_list))
    old_len = len(tup_list)
    if viewpoint_mapping is None:
        viewpoint_mapping = {}  # default: no viewpoint renaming
    tup_list = [(aid, species, viewpoint_mapping.get(species, {}).get(yaw, yaw))
                for aid, species, yaw in tup_list if species in category_set]
    new_len = len(tup_list)
    logger.info('Filtered annotations: keep %d / original %d' %
                (new_len, old_len))

    # Count instances per species and per (species, viewpoint); annotations
    # with no specified viewpoint are tallied separately
    counter = 0
    seen_dict = {}
    yaw_dict = {}
    for tup in tup_list:
        aid, species, yaw = tup
        # Keep track of the number of overall instances
        if species not in seen_dict:
            seen_dict[species] = 0
        seen_dict[species] += 1
        # Keep track of yaws that aren't None
        if yaw is not None:
            if species not in yaw_dict:
                yaw_dict[species] = {}
            if yaw not in yaw_dict[species]:
                yaw_dict[species][yaw] = 0
            yaw_dict[species][yaw] += 1
        else:
            counter += 1

    # Get the list of species that do not have enough viewpoint examples for training
    invalid_seen_set = set([])
    invalid_yaw_set = set([])
    for species in seen_dict:
        # Check that the number of instances is above the min_examples
        if seen_dict[species] < min_examples:
            invalid_seen_set.add(species)
            continue
        # If the species has viewpoints, check them as well
        if strict:
            if species in yaw_dict:
                # Check that all viewpoints exist
                # if len(yaw_dict[species]) < 8:
                #     invalid_yaw_set.add(species)
                #     continue
                # Check that all viewpoints have a minimum number of instances
                for yaw in yaw_dict[species]:
                    # assert yaw in ibs.const.VIEWTEXT_TO_YAW_RADIANS
                    if yaw_dict[species][yaw] < min_examples:
                        invalid_yaw_set.add(species)
                        break  # one under-represented viewpoint invalidates the species
            else:
                invalid_yaw_set.add(species)
                continue

    logger.info('Null yaws: %d' % (counter, ))
    valid_seen_set = category_set - invalid_seen_set
    valid_yaw_set = valid_seen_set - invalid_yaw_set
    logger.info('Requested categories:')
    category_set = sorted(category_set)
    ut.print_list(category_set)
    # logger.info('Invalid yaw categories:')
    # ut.print_list(sorted(invalid_yaw_set))
    # logger.info('Valid seen categories:')
    # ut.print_list(sorted(valid_seen_set))
    logger.info('Valid yaw categories:')
    valid_yaw_set = sorted(valid_yaw_set)
    ut.print_list(valid_yaw_set)
    logger.info('Invalid seen categories (could not fulfill request):')
    invalid_seen_set = sorted(invalid_seen_set)
    ut.print_list(invalid_seen_set)

    skipped_yaw = 0
    skipped_seen = 0
    aid_list_ = []
    category_list_ = []
    for tup in tup_list:
        aid, species, yaw = tup
        if species in valid_yaw_set:
            # If the species is valid, but this specific annotation has no yaw, skip it
            if yaw is None:
                skipped_yaw += 1
                continue
            category = '%s:%s' % (species, yaw)
        elif species in valid_seen_set:
            category = '%s' % (species, )
        else:
            skipped_seen += 1
            continue
        aid_list_.append(aid)
        category_list_.append(category)
    logger.info('Skipped Yaw:  skipped %d / total %d' %
                (skipped_yaw, len(tup_list)))
    logger.info('Skipped Seen: skipped %d / total %d' %
                (skipped_seen, len(tup_list)))

    for category in sorted(set(category_list_)):
        logger.info('Making folder for %r' % (category, ))
        ut.ensuredir(join(train_path, category))
        ut.ensuredir(join(valid_path, category))

    config = {
        'dim_size': (image_size, image_size),
        'resize_dim': 'wh',
        'axis_aligned': use_axis_aligned_chips,
    }
    chip_list_ = ibs.depc_annot.get_property('chips',
                                             aid_list_,
                                             'img',
                                             config=config)

    # Get training data
    label_list = []
    for aid, chip, category in zip(aid_list_, chip_list_, category_list_):

        args = (aid, )
        logger.info('Processing AID: %r' % args)

        if skip_rate > 0.0 and random.uniform(0.0, 1.0) <= skip_rate:
            logger.info('\t Skipping')
            continue

        is_valid = random.uniform(0.0, 1.0) < valid_rate
        dest_path = valid_path if is_valid else train_path
        raw_path = join(dest_path, category)
        assert exists(raw_path)

        # Compute data
        values = (
            dbname,
            aid,
        )
        patch_filename = '%s_annot_aid_%s.png' % values
        patch_filepath = join(raw_path, patch_filename)
        cv2.imwrite(patch_filepath, chip)

        # Compute label
        label = '%s,%s' % (patch_filename, category)
        label_list.append(label)

    logger.info('Using labels for labeler training:')
    logger.info(ut.repr3(ut.dict_hist(category_list_)))

    return name_path
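# A hedged usage sketch for the labeler extractor above, assuming an open
# controller whose database defines a TRAIN_SET imageset; the database name
# and both mappings here are illustrative, not canonical:
import wbia

ibs = wbia.opendb('testdb1')
name_path = get_cnn_labeler_training_images_pytorch(
    ibs,
    image_size=224,
    min_examples=10,
    category_mapping={'zebra_grevys': 'zebra'},           # illustrative
    viewpoint_mapping={'zebra': {'frontleft': 'front'}},  # illustrative
    valid_rate=0.2,
)
# name_path now holds train/<category> and val/<category> folders of chips,
# one folder per "species:viewpoint" (or bare species) label.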
Example No. 48
def get_cnn_localizer_canonical_training_images_pytorch(
    ibs,
    species,
    dest_path=None,
    valid_rate=0.2,
    image_size=224,
    purge=True,
    skip_rate=0.0,
):
    from os.path import join, expanduser, exists
    from wbia.other.detectfuncs import _canonical_get_boxes
    import random
    import cv2

    if dest_path is None:
        dest_path = expanduser(join('~', 'Desktop', 'extracted'))

    name = 'localizer-canonical-pytorch'
    dbname = ibs.dbname
    name_path = join(dest_path, name)
    train_path = join(name_path, 'train')
    valid_path = join(name_path, 'val')

    if purge:
        ut.delete(name_path)

    ut.ensuredir(name_path)
    ut.ensuredir(train_path)
    ut.ensuredir(valid_path)

    train_gid_set = set(
        ibs.get_imageset_gids(
            ibs.get_imageset_imgsetids_from_text('TRAIN_SET')))
    train_gid_list = list(train_gid_set)
    aid_list_, bbox_list = _canonical_get_boxes(ibs, train_gid_list, species)

    config = {
        'dim_size': (image_size, image_size),
        'resize_dim': 'wh',
    }
    chip_list = ibs.depc_annot.get_property('chips',
                                            aid_list_,
                                            'img',
                                            config=config)
    for aid, chip, bbox in zip(aid_list_, chip_list, bbox_list):
        args = (aid, )
        logger.info('Processing AID: %r' % args)

        if skip_rate > 0.0 and random.uniform(0.0, 1.0) <= skip_rate:
            logger.info('\t Skipping - Sampling')
            continue

        is_valid = random.uniform(0.0, 1.0) < valid_rate
        dest_path = valid_path if is_valid else train_path

        # Find the first free index for this aid so an existing chip/label
        # pair is never clobbered; the .png and .csv always share an index
        index = 0
        while True:
            values = (
                dbname,
                aid,
                index,
            )
            patch_filename = '%s_image_aid_%s_%d.png' % values
            label_filename = '%s_image_aid_%s_%d.csv' % values
            patch_filepath = join(dest_path, patch_filename)
            label_filepath = join(dest_path, label_filename)
            if not exists(patch_filepath) and not exists(label_filepath):
                break
            index += 1

        cv2.imwrite(patch_filepath, chip)
        with open(label_filepath, 'w') as label_file:
            bbox = list(bbox)
            for index in range(len(bbox)):
                bbox[index] = '%0.08f' % (bbox[index], )
            label_file.write('%s\n' % (','.join(bbox), ))

    return name_path
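# Each written chip is paired with a CSV of the same stem holding the
# normalized canonical box as four 8-decimal floats. A sketch of reading one
# pair back; the stem follows the '%s_image_aid_%s_%d' pattern above and is
# hypothetical here:
import csv
from os.path import join

import cv2

stem = 'testdb1_image_aid_1_0'  # hypothetical dbname/aid/index
chip = cv2.imread(join(name_path, 'train', stem + '.png'))
with open(join(name_path, 'train', stem + '.csv')) as label_file:
    bbox = [float(value) for value in next(csv.reader(label_file))]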
Example No. 49
def get_cnn_classifier_canonical_training_images_pytorch(
    ibs,
    species,
    dest_path=None,
    valid_rate=0.2,
    image_size=224,
    purge=True,
    skip_rate=0.0,
    skip_rate_pos=0.0,
    skip_rate_neg=0.0,
):
    from os.path import join, expanduser, exists
    import random
    import cv2

    if dest_path is None:
        dest_path = expanduser(join('~', 'Desktop', 'extracted'))

    name = 'classifier-canonical-pytorch'
    dbname = ibs.dbname
    name_path = join(dest_path, name)
    train_path = join(name_path, 'train')
    valid_path = join(name_path, 'val')

    train_pos_path = join(train_path, 'positive')
    train_neg_path = join(train_path, 'negative')
    valid_pos_path = join(valid_path, 'positive')
    valid_neg_path = join(valid_path, 'negative')

    if purge:
        ut.delete(name_path)

    ut.ensuredir(name_path)
    ut.ensuredir(train_path)
    ut.ensuredir(valid_path)

    ut.ensuredir(train_pos_path)
    ut.ensuredir(train_neg_path)
    ut.ensuredir(valid_pos_path)
    ut.ensuredir(valid_neg_path)

    train_gid_set = set(
        ibs.get_imageset_gids(
            ibs.get_imageset_imgsetids_from_text('TRAIN_SET')))
    aid_list = ut.flatten(ibs.get_image_aids(train_gid_set))
    aid_list = ibs.filter_annotation_set(aid_list, species=species)
    flag_list = ibs.get_annot_canonical(aid_list)

    bool_list = [flag is not None for flag in flag_list]
    aid_list = ut.compress(aid_list, bool_list)
    flag_list = ut.compress(flag_list, bool_list)

    config = {
        'dim_size': (image_size, image_size),
        'resize_dim': 'wh',
    }
    chip_list = ibs.depc_annot.get_property('chips',
                                            aid_list,
                                            'img',
                                            config=config)
    for aid, chip, flag in zip(aid_list, chip_list, flag_list):
        args = (aid, )
        logger.info('Processing AID: %r' % args)

        if skip_rate > 0.0 and random.uniform(0.0, 1.0) <= skip_rate:
            logger.info('\t Skipping - Sampling')
            continue

        assert flag is not None

        if flag:
            category = 'positive'
        else:
            category = 'negative'

        if (skip_rate_pos > 0.0 and category == 'positive'
                and random.uniform(0.0, 1.0) <= skip_rate_pos):
            logger.info('\t Skipping Positive')
            continue

        if (skip_rate_neg > 0.0 and category == 'negative'
                and random.uniform(0.0, 1.0) <= skip_rate_neg):
            logger.info('\t Skipping Negative')
            continue

        is_valid = random.uniform(0.0, 1.0) < valid_rate

        if category == 'positive':
            dest_path = valid_pos_path if is_valid else train_pos_path
        elif category == 'negative':
            dest_path = valid_neg_path if is_valid else train_neg_path
        else:
            raise ValueError()

        index = 0
        while True:
            values = (
                dbname,
                aid,
                index,
            )
            patch_filename = '%s_image_aid_%s_%d.png' % values
            patch_filepath = join(dest_path, patch_filename)
            if not exists(patch_filepath):
                break
            index += 1

        cv2.imwrite(patch_filepath, chip)

    return name_path
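# A minimal invocation sketch, assuming an open controller and that some
# annotations of the (illustrative) species carry a canonical flag:
import wbia

ibs = wbia.opendb('testdb1')
name_path = get_cnn_classifier_canonical_training_images_pytorch(
    ibs, 'zebra_grevys',  # species label is illustrative
)
# Chips land in train|val/positive|negative according to each annotation's
# canonical flag, giving a binary ImageFolder-style dataset.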
Example No. 50
    def fix_empty_dirs(drive):
        """
        # --- FIND EMPTY DIRECTORIES ---
        """
        print('Fixing Empty Dirs in %r' % (drive,))
        fidxs_list = ut.dict_take(drive.dpath_to_fidx, drive.dpath_list)
        isempty_flags = [len(fidxs) == 0 for fidxs in fidxs_list]
        empty_dpaths = ut.compress(drive.dpath_list, isempty_flags)

        def is_cplat_link(path_):
            # Windows junctions and reparse points raise OSError on listdir
            try:
                if islink(path_):
                    return True
                os.listdir(path_)
                return False
            except OSError:
                return True
        valid_flags = [not is_cplat_link(d) for d in empty_dpaths]
        if not all(valid_flags):
            print('Filtered windows links %r / %r' % (
                len(empty_dpaths) - sum(valid_flags), len(empty_dpaths)))
            #print(ut.list_str(empty_dpaths[0:10]))
            empty_dpaths = ut.compress(empty_dpaths, valid_flags)

        print('Found %r / %r empty_dpaths' % (len(empty_dpaths), len(drive.dpath_list)))
        print(ut.list_str(empty_dpaths[0:10]))

        # Ensure actually still empty
        current_contents = [ut.glob(d, with_dirs=False)
                            for d in ut.ProgIter(empty_dpaths, 'checking empty status')]
        current_lens = list(map(len, current_contents))
        assert not any(current_lens), 'some dirs are not empty'

        # n ** 2 check to get only the base directories
        isbase_dir = [
            not any([d.startswith(dpath_) and d != dpath_
                        for dpath_ in empty_dpaths])
            for d in ut.ProgIter(empty_dpaths, 'finding base dirs')
        ]
        base_empty_dirs = ut.compress(empty_dpaths, isbase_dir)
        def list_only_files(dpath):
            # glob is too slow
            for root, dirs, fpaths in os.walk(dpath):
                for fpath in fpaths:
                    yield fpath
        base_current_contents = [
            list(list_only_files(d))
            for d in ut.ProgIter(base_empty_dirs, 'checking emptyness', freq=10)]
        is_actually_empty = [len(fs) == 0 for fs in base_current_contents]
        not_really_empty = ut.compress(base_empty_dirs, ut.not_list(is_actually_empty))
        print('%d dirs are not actually empty' % (len(not_really_empty),))
        print('not_really_empty = %s' % (ut.list_str(not_really_empty[0:10]),))
        truly_empty_dirs = ut.compress(base_empty_dirs, is_actually_empty)

        def list_all(dpath):
            # glob is too slow
            for root, dirs, fpaths in os.walk(dpath):
                for dir_ in dirs:
                    yield dir_
                for fpath in fpaths:
                    yield fpath

        exclude_base_dirs = [join(drive.root_dpath, 'AppData')]
        exclude_end_dirs = ['__pycache__']
        # A single pass suffices: the comprehension already checks every
        # excluded prefix and suffix
        truly_empty_dirs1 = [
            d for d in truly_empty_dirs
            if (
                not any(d.startswith(ed) for ed in exclude_base_dirs) and
                not any(d.endswith(ed) for ed in exclude_end_dirs)
            )
        ]
        # Ensure actually still empty (with recursive checks for hidden files)
        print('truly_empty_dirs1[::5] = %s' % (
            ut.list_str(truly_empty_dirs1[0::5], strvals=True),))
        #print('truly_empty_dirs1 = %s' % (ut.list_str(truly_empty_dirs1, strvals=True),))

        if not dryrun:
            # FIX PART
            #from os.path import normpath
            #for d in ut.ProgIter(truly_empty_dirs):
            #    break
            #    if ut.WIN32:
            #        # http://www.sevenforums.com/system-security/53095-file-folder-read-only-attribute-wont-disable.html
            #        ut.cmd('attrib', '-r', '-s', normpath(d), verbose=False)
            #x = ut.remove_fpaths(truly_empty_dirs, strict=False)

            print('Deleting %d truly_empty_dirs1' % (len(truly_empty_dirs1),))

            for d in ut.ProgIter(truly_empty_dirs1, 'DELETE empty dirs', freq=1000):  # NOQA
                ut.delete(d, quiet=True)

            if ut.WIN32 and False:
                # remove file that failed removing
                flags = list(map(exists, truly_empty_dirs1))
                truly_empty_dirs1 = ut.compress(truly_empty_dirs1, flags)
                for d in ut.ProgIter(truly_empty_dirs1, 'rming', freq=1000):
                    ut.cmd('rmdir', d)
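# fix_empty_dirs is a closure: it reads `dryrun` from its enclosing scope and
# expects a drive object exposing dpath_list, dpath_to_fidx, and root_dpath.
# A hypothetical duck-typed stand-in for experimenting with the logic:
class FakeDrive(object):
    def __init__(self, root_dpath, dpath_list, dpath_to_fidx):
        self.root_dpath = root_dpath        # root of the scanned drive
        self.dpath_list = dpath_list        # every directory on the drive
        self.dpath_to_fidx = dpath_to_fidx  # dir -> indices of files inside it

dryrun = True  # consulted by the closure before any deletion happens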
Example No. 51
def ensure_pz_mtest_batchworkflow_test():
    r"""
    CommandLine:
        python -m ibeis.init.sysres --test-ensure_pz_mtest_batchworkflow_test
        python -m ibeis.init.sysres --test-ensure_pz_mtest_batchworkflow_test --reset

    Example:
        >>> # SCRIPT
        >>> from ibeis.init.sysres import *  # NOQA
        >>> ensure_pz_mtest_batchworkflow_test()
    """
    import ibeis
    ibeis.ensure_pz_mtest()
    workdir = ibeis.sysres.get_workdir()
    mtest_dbpath = join(workdir, 'PZ_MTEST')

    source_dbdir = mtest_dbpath
    dest_dbdir = join(workdir, 'PZ_BATCH_WORKFLOW_MTEST')

    if ut.get_argflag('--reset'):
        ut.delete(dest_dbdir)

    if ut.checkpath(dest_dbdir):
        return
    else:
        copy_ibeisdb(source_dbdir, dest_dbdir)

    ibs = ibeis.opendb('PZ_BATCH_WORKFLOW_MTEST')
    assert len(ibs.get_valid_aids()) == 119
    assert len(ibs.get_valid_nids()) == 41

    ibs.delete_all_encounters()

    aid_list = ibs.get_valid_aids()

    unixtime_list = ibs.get_annot_image_unixtimes(aid_list)
    untimed_aids = ut.compress(aid_list, [t == -1 for t in unixtime_list])

    ibs.get_annot_groundtruth(untimed_aids, aid_list)

    aids_list, nid_list = ibs.group_annots_by_name(aid_list)

    hourdiffs_list = ibs.get_name_hourdiffs(nid_list)

    encounter_aids_list = [[] for _ in range(4)]

    encounter_idx = 0

    for hourdiffs, aids in zip(hourdiffs_list, aids_list):
        #import scipy.spatial.distance as spdist
        if len(aids) == 1:
            encounter_aids_list[encounter_idx].extend(aids)
            encounter_idx = (encounter_idx + 1) % len(encounter_aids_list)
        else:
            for chunk in list(ut.ichunks(aids, 2)):
                encounter_aids_list[encounter_idx].extend(chunk)
                encounter_idx = (encounter_idx + 1) % len(encounter_aids_list)

            #import vtool as vt
            #import networkx as netx
            #nodes = list(range(len(aids)))
            #edges_pairs = vt.pdist_argsort(hourdiffs)
            #edge_weights = -hourdiffs[hourdiffs.argsort()]
            #netx_graph = make_netx_graph(edges_pairs, nodes, edge_weights)
            #cut_edges = netx.minimum_edge_cut(netx_graph)
            #netx_graph.remove_edges_from(cut_edges)
            #components = list(netx.connected_components(netx_graph))
            #components = ut.sortedby(components, list(map(len, components)), reverse=True)
            #print(components)
            #encounter_aids_list[0].extend(components[0])
            #for component in components:

            # TODO do max-nway cut
        #day_diffs = spdist.squareform(hourdiffs) / 24.0
        #print(ut.numpy_str(day_diffs, precision=2, suppress_small=True))
        #import itertools
        #compare_idxs = [(r, c) for r, c in itertools.product(range(len(aids)), range(len(aids))) if (c > r)]
        #print(len(aids))
    #def make_netx_graph(edges_pairs, nodes=None, edge_weights=None):
    #    import networkx as netx
    #    node_lbls = [('id_', 'int')]

    #    edge_lbls = [('weight', 'float')]
    #    edges = [(pair[0], pair[1], weight) for pair, weight in zip(edges_pairs, edge_weights)]

    #    print('make_netx_graph')
    #    # Make a graph between the chips
    #    netx_nodes = [(ntup[0], {key[0]: val for (key, val) in zip(node_lbls, ntup[1:])})
    #                  for ntup in iter(zip(nodes))]

    #    netx_edges = [(etup[0], etup[1], {key[0]: val for (key, val) in zip(edge_lbls, etup[2:])})
    #                  for etup in iter(edges)]
    #    netx_graph = netx.Graph()
    #    netx_graph.add_nodes_from(netx_nodes)
    #    netx_graph.add_edges_from(netx_edges)
    #    return netx_graph

    # Group into encounters based on old names
    gids_list = ibs.unflat_map(ibs.get_annot_image_rowids, encounter_aids_list)
    eid_list = ibs.new_encounters_from_images(gids_list)  # NOQA

    # Remove all names
    ibs.delete_annot_nids(aid_list)
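# The loop above deals each name's annotations into four encounters in chunks
# of two, round-robin. A standalone sketch of that scheme on toy aid groups:
import utool as ut

encounter_aids_list = [[] for _ in range(4)]
encounter_idx = 0
for aids in [[1], [2, 3, 4, 5], [6, 7]]:  # toy groups of aids per name
    for chunk in ut.ichunks(aids, 2):
        encounter_aids_list[encounter_idx].extend(chunk)
        encounter_idx = (encounter_idx + 1) % len(encounter_aids_list)
print(encounter_aids_list)  # [[1], [2, 3], [4, 5], [6, 7]]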
Example No. 52
def ensure_pz_mtest_mergesplit_test():
    r"""
    Make a test database for MERGE and SPLIT cases

    CommandLine:
        python -m ibeis.init.sysres --test-ensure_pz_mtest_mergesplit_test

    Example:
        >>> # SCRIPT
        >>> from ibeis.init.sysres import *  # NOQA
        >>> ensure_pz_mtest_mergesplit_test()
    """
    import ibeis
    ibeis.ensure_pz_mtest()
    workdir = ibeis.sysres.get_workdir()
    mtest_dbpath = join(workdir, 'PZ_MTEST')

    source_dbdir = mtest_dbpath
    dest_dbdir = join(workdir, 'PZ_MERGESPLIT_MTEST')

    if ut.get_argflag('--reset'):
        ut.delete(dest_dbdir)
    if ut.checkpath(dest_dbdir):
        return

    copy_ibeisdb(source_dbdir, dest_dbdir)

    ibs = ibeis.opendb('PZ_MERGESPLIT_MTEST')
    assert len(ibs.get_valid_aids()) == 119
    assert len(ibs.get_valid_nids()) == 41

    aid_list = ibs.get_valid_aids()
    aids_list, nid_list = ibs.group_annots_by_name(aid_list)
    num_aids = list(map(len, aids_list))

    # num cases wanted
    num_merge = 3
    num_split = 1
    num_combo = 1

    # num inputs needed
    num_merge_names = num_merge
    num_split_names = num_split * 2
    num_combo_names = num_combo * 3

    total_names = num_merge_names + num_split_names + num_combo_names

    modify_aids = ut.take(aids_list, ut.list_argsort(num_aids, reverse=True)[0:total_names])

    merge_nids1 = ibs.make_next_nids(num_merge, location_text='XMERGE')
    merge_nids2 = ibs.make_next_nids(num_merge, location_text='XMERGE')
    split_nid  = ibs.make_next_nids(num_split, location_text='XSPLIT')[0]
    combo_nids = ibs.make_next_nids(num_combo * 2, location_text='XCOMBO')

    # the first 3 become merge cases
    #left = 0
    #right = left + num_merge
    for aids, nid1, nid2 in zip(modify_aids[0:3], merge_nids1, merge_nids2):
        # Split each name's annotations across two fresh nids so the pair
        # must later be merged back together
        aids_even = aids[::2]
        aids_odd = aids[1::2]
        ibs.set_annot_name_rowids(aids_even, [nid1] * len(aids_even))
        ibs.set_annot_name_rowids(aids_odd, [nid2] * len(aids_odd))

    # the next 2 become split cases
    #left = right
    #right = left + num_split_names
    for aids in modify_aids[3:5]:
        ibs.set_annot_name_rowids(aids, [split_nid] * len(aids))

    #left = right
    #right = left + num_combo_names
    # The final 3 are a combination case
    for aids in modify_aids[5:8]:
        aids_even = aids[::2]
        aids_odd = aids[1::2]
        ibs.set_annot_name_rowids(aids_even, [combo_nids[0]] * len(aids_even))
        ibs.set_annot_name_rowids(aids_odd, [combo_nids[1]] * len(aids_odd))

    final_result = ibs.unflat_map(ibs.get_annot_nids, modify_aids)
    print('final_result = %s' % (ut.list_str(final_result),))
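# The modify_aids selection above keeps the most populated names. A toy
# illustration of the ut.list_argsort / ut.take idiom it relies on:
import utool as ut

aids_list = [[1, 2], [3, 4, 5, 6], [7], [8, 9, 10]]
num_aids = list(map(len, aids_list))
biggest_first = ut.list_argsort(num_aids, reverse=True)
print(ut.take(aids_list, biggest_first[0:2]))  # [[3, 4, 5, 6], [8, 9, 10]]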
Example No. 53
def delete_cache(back):
    """ Help -> Delete Directory Slots"""
    print('[back] delete_cache')
    utool.delete(back.ibs.get_cachedir())
Example No. 54
def TEST_GUI_ALL(ibs, back, gpath_list):
    """
    Creates a new database
    Adds test images
    Creates dummy ANNOTATIONS
    Selects things
    """
    # DELETE OLD
    print('[TEST] DELETE_OLD_DATABASE')
    work_dir   = sysres.get_workdir()
    new_dbname = 'testdb_guiall'
    new_dbdir = utool.truepath(utool.join(work_dir, new_dbname))
    ibs_dbdir = utool.truepath(ibs.dbdir)
    msg = 'must start in different dir new_dbdir=%r != ibs_dbdir=%r,' % (new_dbdir, ibs_dbdir)
    assert new_dbdir != ibs_dbdir, msg
    print('passed: ' + msg)
    utool.delete(new_dbdir, ignore_errors=False)
    #
    #
    # CREATE NEW
    print('[TEST] CREATE_NEW_DATABASE')
    back.new_database(new_dbdir)
    ibs = back.ibs  # The backend has a new ibeis do not use the old one

    # Dont refresh for speed
    _kwargs = {'refresh': False}
    #
    #
    # IMPORT IMAGES
    print('[TEST] IMPORT_TEST_GPATHS')
    print('gpath_list = ' + utool.indentjoin(gpath_list))
    gid_list = back.import_images(gpath_list=gpath_list, **_kwargs)
    print('\n'.join('  * gid_list[%d] = %r' % (count, gid) for count, gid in enumerate(gid_list)))
    assert len(gid_list) == len(gpath_list)
    #
    #
    # ADD ANNOTATIONS
    print('[TEST] ADD_ANNOTATIONS')
    def add_annot(gid, bbox, theta=0.0):
        aid = back.add_annot(gid=gid, bbox=bbox, theta=theta, **_kwargs)
        return aid

    preadd_aids = ibs.get_valid_aids()  # this should be []
    assert len(preadd_aids) == 0, 'there are already aids in the database!'
    print('preadd_aids = %r' % preadd_aids)

    aid1 = add_annot(gid_list[0], (50, 50, 100, 100), (np.pi / 4))  # tau / 8; numpy defines no tau
    aid2 = add_annot(gid_list[1], (50, 50, 100, 100))
    aid3 = add_annot(gid_list[2], (50, 50, 64, 64))
    aid4 = add_annot(gid_list[2], (50, 50, 200, 200))
    aid5 = add_annot(gid_list[1], (0, 0, 400, 400))

    print('aid1 = %r' % aid1)
    print('aid2 = %r' % aid2)
    print('aid3 = %r' % aid3)
    print('aid4 = %r' % aid4)
    print('aid5 = %r' % aid5)
    #
    #
    # SELECT ANNOTATIONS
    print('[TEST] SELECT ANNOTATION / Add Chips')
    # get_valid_aids seems to return aids in an arbitrary order, it's an SQL thing
    aid_list = sorted(ibs.get_valid_aids())
    print('\n'.join('  * aid_list[%d] = %r' % (count, aid) for count, aid in enumerate(aid_list)))

    back.select_aid(aid_list[0], show_image=True, **_kwargs)
    try:
        bbox_list = ibs.get_annot_bboxes(aid_list)
        assert bbox_list[0] == (50, 50, 100, 100)
    except AssertionError as ex:
        utool.printex(ex, key_list=['bbox_list', 'aid_list'])
        raise
    back.reselect_annotation(bbox=[51, 52, 103, 104])
    assert ibs.get_annot_bboxes(aid_list[0]) == (51, 52, 103, 104)

    back.compute_encounters()

    unixtime_list = [100, 23, 24]
    ibs.set_image_unixtime(gid_list, unixtime_list)

    back.compute_encounters()

    # Change some ANNOTATIONs

    #add_annot(gid_list[2], None)  # user selection
    #add_annot(None, [42, 42, 8, 8])  # back selection
    # I'm not sure how I want to integrate that IPython stuff
    return locals()
Example No. 55
def delete_detection_models(back):
    print('[back] delete_detection_models')
    utool.delete(utool.get_app_resource_dir('ibeis', 'detectmodels'))
Example No. 56
def delete_testdbs():
    utool.delete(TESTDB0, ignore_errors=False)
    utool.delete(TESTDB1, ignore_errors=False)
    utool.delete(TESTDB_GUIALL, ignore_errors=False)
Example No. 57
def delete_queryresults_dir(back):
    print('[back] delete_queryresults_dir')
    # TODO: add an "are you sure?" confirmation dialog
    utool.delete(back.ibs.qresdir)