def exec_(script):
    import utool as ut
    print('+**** exec %s script *******' % (script.type_))
    # NOTE: ``repo`` is not defined locally; it is expected from the enclosing scope
    print('repo = %r' % (repo,))
    with ut.ChdirContext(repo.dpath):
        if script.is_fpath_valid():
            normbuild_flag = '--no-rmbuild'
            if ut.get_argflag(normbuild_flag):
                ut.cmd(script.fpath + ' ' + normbuild_flag)
            else:
                ut.cmd(script.fpath)
        else:
            if script.text is not None:
                print('ABOUT TO EXECUTE')
                ut.print_code(script.text, 'bash')
                if ut.are_you_sure('execute above script?'):
                    from os.path import join
                    scriptdir = ut.ensure_app_resource_dir('utool', 'build_scripts')
                    script_path = join(scriptdir,
                                       'script_' + script.type_ + '_' +
                                       ut.hashstr27(script.text) + '.sh')
                    ut.writeto(script_path, script.text)
                    _ = ut.cmd('bash ', script_path)  # NOQA
            else:
                print("CANT QUITE EXECUTE THIS YET")
                ut.print_code(script.text, 'bash')
    #os.system(scriptname)
    print('L**** exec %s script *******' % (script.type_))
def get_tomcat_startup_tmpdir():
    dpath_list = [
        #os.environ.get('CATALINA_TMPDIR', None),
        ut.ensure_app_resource_dir('ibeis', 'tomcat', 'ibeis_startup_tmpdir'),
    ]
    tomcat_startup_dir = ut.search_candidate_paths(dpath_list, verbose=True)
    return tomcat_startup_dir
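# Usage sketch (added; not part of the original module). The helper returns the
# first existing candidate directory found by ut.search_candidate_paths, which
# here is the freshly ensured ibeis tomcat startup resource dir.
def _demo_tomcat_tmpdir():
    tomcat_startup_dir = get_tomcat_startup_tmpdir()
    print('tomcat_startup_dir = %r' % (tomcat_startup_dir,))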
def grab_mnist_siam_dataset():
    r"""
    CommandLine:
        python -m ibeis_cnn.ingest_data --test-grab_mnist_siam_dataset --show

    Example:
        >>> # ENABLE_DOCTEST
        >>> from ibeis_cnn.ingest_data import *  # NOQA
        >>> dataset = grab_mnist_siam_dataset()
        >>> ut.quit_if_noshow()
        >>> from ibeis_cnn import draw_results
        >>> #ibsplugin.rrr()
        >>> flat_metadata = {}
        >>> data, labels = dataset.subset('full')
        >>> ut.quit_if_noshow()
        >>> dataset.interact()
        >>> ut.show_if_requested()
    """
    training_dpath = ut.ensure_app_resource_dir('ibeis_cnn', 'training')
    dataset = DataSet(
        name='mnist_pairs',
        training_dpath=training_dpath,
        data_shape=(28, 28, 1),
    )
    try:
        dataset.load()
    except IOError:
        data_, labels_, metadata_ = ingest_helpers.grab_mnist2()
        data, labels = ingest_helpers.convert_category_to_siam_data(
            data_, labels_)
        dataset.save(data, labels, data_per_label=2)
    return dataset
def __init__(temp, delete=True, verbose=False):
    temp.delete = delete
    appname = 'ibeis'
    temp.dpath = ut.ensure_app_resource_dir(appname, 'tempfiles')
    temp.fpath = None
    temp.fname = None
    temp._isclosed = False
    temp.verbose = verbose
    temp._create_unique_file()
def render_html(html_str):
    """ makes a temporary html rendering """
    import utool as ut
    from os.path import abspath
    import webbrowser
    html_dpath = ut.ensure_app_resource_dir('utool', 'temp_html')
    fpath = abspath(ut.unixjoin(html_dpath, 'temp.html'))
    url = 'file://' + fpath
    ut.writeto(fpath, html_str)
    webbrowser.open(url)
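# Usage sketch (added; not part of the original utool source). render_html
# writes the string to the utool temp_html resource dir as temp.html and opens
# it in the default browser, so calling it has a visible side effect.
def _demo_render_html():
    render_html('<h1>render_html demo</h1><p>temporary page</p>')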
def render_latex_text(input_text, nest_in_doc=False, preamb_extra=None,
                      appname='utool', verbose=None):
    """ compiles latex and shows the result """
    import utool as ut
    if verbose is None:
        verbose = ut.VERBOSE
    dpath = ut.ensure_app_resource_dir(appname, 'latex_tmp')
    # put a latex fragment in a full document
    # print(input_text)
    fname = 'temp_render_latex'
    pdf_fpath = ut.compile_latex_text(
        input_text, dpath=dpath, fname=fname, preamb_extra=preamb_extra,
        verbose=verbose)
    ut.startfile(pdf_fpath)
    return pdf_fpath
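# Usage sketch (added; not from the original source). Assumes a working LaTeX
# toolchain so ut.compile_latex_text can build a PDF inside the appname's
# latex_tmp resource dir; the resulting file is opened with the system viewer.
def _demo_render_latex_text():
    pdf_fpath = render_latex_text(r'$a^2 + b^2 = c^2$', appname='utool')
    print('compiled demo PDF to %r' % (pdf_fpath,))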
def get_nth_test_schema_version(schema_spec, n=-1):
    """
    Gets a fresh and empty test version of a schema

    Args:
        schema_spec (module): schema module to get nth version of
        n (int): version index (-1 is the latest)
    """
    from dtool.sql_control import SQLDatabaseController
    dbname = schema_spec.__name__
    print('[_SQL] getting n=%r-th version of %r' % (n, dbname))
    version_expected = list(schema_spec.VALID_VERSIONS.keys())[n]
    cachedir = ut.ensure_app_resource_dir('ibeis_test')
    db_fname = 'test_%s.sqlite3' % dbname
    ut.delete(join(cachedir, db_fname))
    db = SQLDatabaseController(cachedir, db_fname, text_factory=unicode)
    ensure_correct_version(
        None, db, version_expected, schema_spec, dobackup=False)
    return db
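# Usage sketch (added; not part of the original test helpers). schema_spec can
# be any schema module that defines VALID_VERSIONS, as described in the Args
# block above; negative and zero indices select the newest and oldest versions.
def _demo_latest_and_oldest_schema(schema_spec):
    latest_db = get_nth_test_schema_version(schema_spec, n=-1)
    oldest_db = get_nth_test_schema_version(schema_spec, n=0)
    return latest_db, oldest_db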
def grab_mnist_category_dataset():
    r"""
    CommandLine:
        python -m ibeis_cnn grab_mnist_category_dataset
        python -m ibeis_cnn grab_mnist_category_dataset_float
        python -m ibeis_cnn grab_mnist_category_dataset --show

    Example:
        >>> # DISABLE_DOCTEST
        >>> from ibeis_cnn.ingest_data import *  # NOQA
        >>> dataset = grab_mnist_category_dataset()
        >>> dataset.print_subset_info()
        >>> dataset.print_dir_tree()
        >>> ut.quit_if_noshow()
        >>> inter = dataset.interact()
        >>> ut.show_if_requested()
    """
    import numpy as np
    training_dpath = ut.ensure_app_resource_dir('ibeis_cnn', 'training')
    dataset = DataSet(name='mnist_uint8', training_dpath=training_dpath,
                      data_shape=(28, 28, 1))
    try:
        dataset.load()
    except IOError:
        data, labels, metadata = ingest_helpers.grab_mnist1()
        # Get indices of test / train split
        train_idxs = np.arange(60000)
        test_idxs = np.arange(10000) + 60000
        # Give dataset the full data
        dataset.save(data, labels, metadata, data_per_label=1)
        # And the split sets
        dataset.add_split('train', train_idxs)
        dataset.add_split('test', test_idxs)
        dataset.clear_cache()
    dataset.ensure_symlinked()
    return dataset
def get_juction_dpath():
    r"""
    Returns:
        str: junction_dpath

    CommandLine:
        python -m ibeis_cnn --tf get_juction_dpath --show

    Example:
        >>> # ENABLE_DOCTEST
        >>> from ibeis_cnn.dataset import *  # NOQA
        >>> junction_dpath = get_juction_dpath()
        >>> result = ('junction_dpath = %s' % (str(junction_dpath),))
        >>> print(result)
        >>> ut.quit_if_noshow()
        >>> ut.vd(junction_dpath)
    """
    junction_dpath = ut.ensure_app_resource_dir('ibeis_cnn', 'training_junction')
    # Hacks to keep junction clean
    home_dlink = ut.truepath('~/training_junction')
    if not exists(home_dlink):
        ut.symlink(junction_dpath, home_dlink)
    ut.remove_broken_links(junction_dpath)
    return junction_dpath
def get_nth_test_schema_version(schema_spec, n=-1):
    """
    Gets a fresh and empty test version of a schema

    Args:
        schema_spec (module): schema module to get nth version of
        n (int): version index (-1 is the latest)
    """
    from wbia.dtool.sql_control import SQLDatabaseController
    dbname = schema_spec.__name__
    logger.info('[_SQL] getting n=%r-th version of %r' % (n, dbname))
    version_expected = list(schema_spec.VALID_VERSIONS.keys())[n]
    cachedir = ut.ensure_app_resource_dir('wbia_test')
    db_fname = 'test_%s.sqlite3' % dbname
    ut.delete(join(cachedir, db_fname))
    db_uri = 'sqlite:///{}'.format(realpath(join(cachedir, db_fname)))
    db = SQLDatabaseController(db_uri, dbname)
    ensure_correct_version(None, db, version_expected, schema_spec, dobackup=False)
    return db
def test_reloading_metaclass():
    r"""
    CommandLine:
        python -m utool.util_class --test-test_reloading_metaclass

    References:
        http://stackoverflow.com/questions/8122734/pythons-imp-reload-function-is-not-working

    Example:
        >>> # ENABLE_DOCTEST
        >>> from utool.util_class import *  # NOQA
        >>> result = test_reloading_metaclass()
        >>> print(result)
    """
    import utool as ut
    testdir = ut.ensure_app_resource_dir('utool', 'metaclass_tests')
    testfoo_fpath = ut.unixjoin(testdir, 'testfoo.py')
    # os.chdir(testdir)
    #with ut.ChdirContext(testdir, stay=ut.inIPython()):
    with ut.ChdirContext(testdir):
        foo_code1 = ut.codeblock(
            r'''
            # STARTBLOCK
            import utool as ut
            import six

            @six.add_metaclass(ut.ReloadingMetaclass)
            class Foo(object):
                def __init__(self):
                    pass
                spamattr = 'version1'
            # ENDBLOCK
            '''
        )
        foo_code2 = ut.codeblock(
            r'''
            # STARTBLOCK
            import utool as ut
            import six

            @six.add_metaclass(ut.ReloadingMetaclass)
            class Foo(object):
                def __init__(self):
                    pass

                def bar(self):
                    return 'spam'
                eggsattr = 'version2'
            # ENDBLOCK
            '''
        )
        # Write a testclass to disk
        ut.delete(testfoo_fpath)
        ut.write_to(testfoo_fpath, foo_code1, verbose=True)
        testfoo = ut.import_module_from_fpath(testfoo_fpath)
        #import testfoo
        foo = testfoo.Foo()
        print('foo = %r' % (foo,))
        assert not hasattr(foo, 'bar'), 'foo should not have a bar attr'
        ut.delete(testfoo_fpath + 'c')  # remove the pyc file because of the identical creation time
        ut.write_to(testfoo_fpath, foo_code2, verbose=True)
        assert not hasattr(foo, 'bar'), 'foo should still not have a bar attr'
        foo.rrr()
        assert foo.bar() == 'spam'
        ut.delete(testfoo_fpath)
        print('Reloading worked nicely')
def simple_thumbnail_widget():
    r"""
    Very simple example to test thumbnails

    CommandLine:
        python -m guitool.api_thumb_delegate --test-simple_thumbnail_widget --show
        python -m guitool.api_thumb_delegate --test-simple_thumbnail_widget --show --tb

    Example:
        >>> # GUI_DOCTEST
        >>> from guitool.api_thumb_delegate import *  # NOQA
        >>> import guitool
        >>> guitool.ensure_qapp()  # must be ensured before any embedding
        >>> wgt = simple_thumbnail_widget()
        >>> ut.quit_if_noshow()
        >>> wgt.show()
        >>> guitool.qtapp_loop(wgt, frequency=100)
    """
    import guitool
    guitool.ensure_qapp()
    col_name_list = ['rowid', 'image_name', 'thumb']
    col_types_dict = {
        'thumb': 'PIXMAP',
    }
    guitool_test_thumbdir = ut.ensure_app_resource_dir('guitool', 'thumbs')
    ut.delete(guitool_test_thumbdir)
    ut.ensuredir(guitool_test_thumbdir)
    import vtool as vt
    from os.path import join

    def thumb_getter(id_, thumbsize=128):
        """ Thumb getters must conform to thumbtup structure """
        #print(id_)
        if id_ == 'doesnotexist.jpg':
            return None
            img_path = None
            img_size = (100, 100)
        else:
            img_path = ut.grab_test_imgpath(id_, verbose=False)
            img_size = vt.open_image_size(img_path)
        thumb_path = join(guitool_test_thumbdir, ut.hashstr(str(img_path)) + '.jpg')
        if id_ == 'carl.jpg':
            bbox_list = [(10, 10, 200, 200)]
            theta_list = [0]
        elif id_ == 'lena.png':
            #bbox_list = [(10, 10, 200, 200)]
            bbox_list = [None]
            theta_list = [None]
        else:
            bbox_list = []
            theta_list = []
        thumbtup = (thumb_path, img_path, img_size, bbox_list, theta_list)
        #print('thumbtup = %r' % (thumbtup,))
        return thumbtup
        #return None

    #imgname_list = sorted(ut.TESTIMG_URL_DICT.keys())
    imgname_list = ['carl.jpg', 'lena.png', 'patsy.jpg']
    imgname_list += ['doesnotexist.jpg']
    col_getter_dict = {
        'rowid': list(range(len(imgname_list))),
        'image_name': imgname_list,
        'thumb': thumb_getter
    }
    col_ider_dict = {
        'thumb': 'image_name',
    }
    col_setter_dict = {}
    editable_colnames = []
    sortby = 'rowid'
    get_thumb_size = lambda: 128  # NOQA
    col_width_dict = {}
    col_bgrole_dict = {}
    api = guitool.CustomAPI(
        col_name_list, col_types_dict, col_getter_dict,
        col_bgrole_dict, col_ider_dict, col_setter_dict,
        editable_colnames, sortby, get_thumb_size, True, col_width_dict)
    headers = api.make_headers(tblnice='Utool Test Images')
    wgt = guitool.APIItemWidget()
    wgt.change_headers(headers)
    wgt.resize(600, 400)
    #guitool.qtapp_loop(qwin=wgt, ipy=ipy, frequency=loop_freq)
    return wgt
def merge_datasets(dataset_list):
    """
    Merges a list of dataset objects into a single combined dataset.
    """

    def consensus_check_factory():
        """
        Returns a temporary function used to check that all incoming values
        with the same key are consistent
        """
        from collections import defaultdict
        past_values = defaultdict(lambda: None)

        def consensus_check(value, key):
            assert past_values[key] is None or past_values[key] == value, (
                'key=%r with value=%r does not agree with past_value=%r' %
                (key, value, past_values[key]))
            past_values[key] = value
            return value
        return consensus_check

    total_num_labels = 0
    total_num_data = 0

    input_alias_list = [dataset.alias_key for dataset in dataset_list]

    alias_key = 'combo_' + ut.hashstr27(repr(input_alias_list), hashlen=8)
    training_dpath = ut.ensure_app_resource_dir('ibeis_cnn', 'training', alias_key)
    data_fpath = ut.unixjoin(training_dpath, alias_key + '_data.hdf5')
    labels_fpath = ut.unixjoin(training_dpath, alias_key + '_labels.hdf5')

    try:
        # Try and short circuit cached loading
        merged_dataset = DataSet.from_alias_key(alias_key)
        return merged_dataset
    except (Exception, AssertionError) as ex:
        ut.printex(ex, 'alias definitions have changed. alias_key=%r' %
                   (alias_key,), iswarning=True)

    # Build the dataset
    consensus_check = consensus_check_factory()

    for dataset in dataset_list:
        print(ut.get_file_nBytes_str(dataset.data_fpath))
        print(dataset.data_fpath_dict['full'])
        print(dataset.num_labels)
        print(dataset.data_per_label)
        total_num_labels += dataset.num_labels
        total_num_data += (dataset.data_per_label * dataset.num_labels)
        # check that all data_dims agree
        data_shape = consensus_check(dataset.data_shape, 'data_shape')
        data_per_label = consensus_check(dataset.data_per_label, 'data_per_label')

    # hack record this
    import numpy as np
    data_dtype = np.uint8
    label_dtype = np.int32

    data = np.empty((total_num_data,) + data_shape, dtype=data_dtype)
    labels = np.empty(total_num_labels, dtype=label_dtype)

    #def iterable_assignment():
    #    pass
    data_left = 0
    data_right = None
    labels_left = 0
    labels_right = None
    for dataset in ut.ProgressIter(dataset_list, lbl='combining datasets', freq=1):
        X_all, y_all = dataset.subset('full')
        labels_right = labels_left + y_all.shape[0]
        data_right = data_left + X_all.shape[0]
        data[data_left:data_right] = X_all
        labels[labels_left:labels_right] = y_all
        data_left = data_right
        labels_left = labels_right

    ut.save_data(data_fpath, data)
    ut.save_data(labels_fpath, labels)

    labels = ut.load_data(labels_fpath)
    num_labels = len(labels)

    merged_dataset = DataSet.new_training_set(
        alias_key=alias_key,
        data_fpath=data_fpath,
        labels_fpath=labels_fpath,
        metadata_fpath=None,
        training_dpath=training_dpath,
        data_shape=data_shape,
        data_per_label=data_per_label,
        output_dims=1,
        num_labels=num_labels,
    )
    return merged_dataset
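# Usage sketch (added; dataset_a and dataset_b are placeholder DataSet objects,
# not from the original source). The inputs must agree on data_shape and
# data_per_label, otherwise the consensus check above raises an AssertionError.
def _demo_merge(dataset_a, dataset_b):
    merged_dataset = merge_datasets([dataset_a, dataset_b])
    return merged_dataset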
def grab_liberty_siam_dataset(pairs=250000):
    """
    References:
        http://www.cs.ubc.ca/~mbrown/patchdata/patchdata.html
        https://github.com/osdf/datasets/blob/master/patchdata/dataset.py

    Notes:
        "info.txt" contains the match information

        Each row of info.txt corresponds to a separate patch, with the patches
        ordered from left to right and top to bottom in each bitmap image.

        3 types of metadata files

        info.txt - contains patch ids that correspond with the order of patches
            in the bmp images
            In the format:
                pointid, unused

        interest.txt - interest points corresponding to patches with patchids
            has same number of rows as info.txt
            In the format:
                reference image id, x, y, orientation, scale (in log2 units)

        m50_<d>_<d>_0.txt - matches files
            patchID1  3DpointID1  unused1  patchID2  3DpointID2  unused2

    CommandLine:
        python -m ibeis_cnn.ingest_data --test-grab_liberty_siam_dataset --show

    Example:
        >>> # ENABLE_DOCTEST
        >>> from ibeis_cnn.ingest_data import *  # NOQA
        >>> pairs = 500
        >>> dataset = grab_liberty_siam_dataset(pairs)
        >>> ut.quit_if_noshow()
        >>> from ibeis_cnn import draw_results
        >>> #ibsplugin.rrr()
        >>> flat_metadata = {}
        >>> data, labels = dataset.subset('full')
        >>> ut.quit_if_noshow()
        >>> warped_patch1_list = data[::2]
        >>> warped_patch2_list = data[1::2]
        >>> dataset.interact()
        >>> ut.show_if_requested()
    """
    datakw = {
        'detector': 'dog',
        'pairs': pairs,
    }

    assert datakw['detector'] in ['dog', 'harris']
    assert pairs in [500, 50000, 100000, 250000]

    liberty_urls = {
        'dog': 'http://www.cs.ubc.ca/~mbrown/patchdata/liberty.zip',
        'harris': 'http://www.cs.ubc.ca/~mbrown/patchdata/liberty_harris.zip',
    }
    url = liberty_urls[datakw['detector']]
    ds_path = ut.grab_zipped_url(url)

    ds_name = splitext(basename(ds_path))[0]
    alias_key = 'liberty;' + ut.dict_str(datakw, nl=False, explicit=True)
    cfgstr = ','.join([str(val) for key, val in ut.iteritems_sorted(datakw)])

    # TODO: allow a move of the base data prefix
    training_dpath = ut.ensure_app_resource_dir('ibeis_cnn', 'training', ds_name)
    if ut.get_argflag('--vtd'):
        ut.vd(training_dpath)
    ut.ensuredir(training_dpath)

    data_fpath = join(training_dpath, 'liberty_data_' + cfgstr + '.pkl')
    labels_fpath = join(training_dpath, 'liberty_labels_' + cfgstr + '.pkl')
    if not ut.checkpath(data_fpath, verbose=True):
        data, labels = ingest_helpers.extract_liberty_style_patches(
            ds_path, pairs)
        ut.save_data(data_fpath, data)
        ut.save_data(labels_fpath, labels)

    # hack for caching num_labels
    labels = ut.load_data(labels_fpath)
    num_labels = len(labels)

    dataset = DataSet.new_training_set(
        alias_key=alias_key,
        data_fpath=data_fpath,
        labels_fpath=labels_fpath,
        metadata_fpath=None,
        training_dpath=training_dpath,
        data_shape=(64, 64, 1),
        data_per_label=2,
        output_dims=1,
        num_labels=num_labels,
    )
    return dataset
def get_ibeis_resource_dir():
    return ut.ensure_app_resource_dir('ibeis')
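# Usage sketch (added; not part of the original module). The returned directory
# is created if missing and lives under the platform application-resource root
# that utool manages (for example ~/.config/ibeis on typical Linux setups).
def _demo_ibeis_resource_dir():
    print('ibeis resource dir = %r' % (get_ibeis_resource_dir(),))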