def compute_thumbnails(depc, gid_list, config=None):
    r"""
    Computes the thumbnail for a given input image

    Args:
        depc (ibeis.depends_cache.DependencyCache):
        gid_list (list): list of image rowids
        config (dict): (default = None)

    Yields:
        (uri, int, int): tup

    CommandLine:
        ibeis --tf compute_thumbnails --show --db PZ_MTEST

    Example:
        >>> # ENABLE_DOCTEST
        >>> from ibeis.core_images import *  # NOQA
        >>> import ibeis
        >>> defaultdb = 'testdb1'
        >>> ibs = ibeis.opendb(defaultdb=defaultdb)
        >>> depc = ibs.depc_image
        >>> gid_list = ibs.get_valid_gids()[0:10]
        >>> thumbs = depc.get_property('thumbnails', gid_list, 'img', config={'thumbsize': 221})
        >>> ut.quit_if_noshow()
        >>> import plottool as pt
        >>> iteract_obj = pt.interact_multi_image.MultiImageInteraction(thumbs, nPerPage=4)
        >>> iteract_obj.start()
        >>> pt.show_if_requested()
    """
    ibs = depc.controller
    draw_annots = config['draw_annots']
    thumbsize = config['thumbsize']
    if thumbsize is None:
        # Fall back to the controller-level configured thumbnail size
        cfg = ibs.cfg.other_cfg
        thumbsize = cfg.thumb_size if draw_annots else cfg.thumb_bare_size
    thumbsize_list = [thumbsize] * len(gid_list)
    gpath_list = ibs.get_image_paths(gid_list)
    orient_list = ibs.get_image_orientation(gid_list)
    aids_list = ibs.get_image_aids(gid_list)
    if draw_annots:
        bboxes_list = ibs.unflat_map(ibs.get_annot_bboxes, aids_list)
        thetas_list = ibs.unflat_map(ibs.get_annot_thetas, aids_list)
    else:
        # No annotations drawn: empty overlay lists, one per image
        bboxes_list = [[] for aids in aids_list]
        thetas_list = [[] for aids in aids_list]
    # Execute all tasks in parallel
    # BUGFIX: materialize the zip so len(args_list) below works on Python 3
    # (zip returns an iterator without __len__)
    args_list = list(zip(thumbsize_list, gpath_list, orient_list, bboxes_list,
                         thetas_list))
    genkw = {
        'ordered': False,
        'chunksize': 256,
        'freq': 50,
        #'adjust': True,
        'force_serial': ibs.force_serial or config['force_serial'],
    }
    gen = ut.generate(draw_thumb_helper, args_list, nTasks=len(args_list),
                      **genkw)
    for val in gen:
        yield val
def add_images_params_gen(gpath_list, **kwargs):
    """
    Asynchronously generate the parameter tuples for add_images sql commands.

    Args:
        gpath_list (list):

    Kwargs:
        ordered, force_serial, chunksize, prog, verbose, quiet, nTasks, freq,
        adjust

    Returns:
        generator: params_gen

    CommandLine:
        python -m ibeis.algo.preproc.preproc_image --exec-add_images_params_gen

    Example0:
        >>> # ENABLE_DOCTEST
        >>> from ibeis.algo.preproc.preproc_image import *  # NOQA
        >>> from vtool.tests import grabdata
        >>> gpath_list = grabdata.get_test_gpaths(ndata=3) + ['doesnotexist.jpg']
        >>> params_list = list(add_images_params_gen(gpath_list))
        >>> assert str(params_list[0][0]) == '66ec193a-1619-b3b6-216d-1784b4833b61', 'UUID gen method changed'
        >>> assert str(params_list[0][3]) == 'easy1.JPG', 'orig name is different'
        >>> assert params_list[3] is None
    """
    # Delegate to the parallel generator; parsing must run serially here.
    return ut.generate(parse_imageinfo, gpath_list, adjust=True,
                       force_serial=True, **kwargs)
def compute_extramargin_detectchip(ibs, aid_list, config2_=None, species=None,
                                   FACTOR=4):
    """
    Compute extra-margin detection chips for the given annotations and return
    their file paths along with matching probchip paths and half offsets.
    """
    #from vtool import chip as ctool
    #from vtool import image as gtool
    info = get_extramargin_detectchip_info(
        ibs, aid_list, config2_=config2_, species=species, FACTOR=FACTOR)
    arg_list, newsize_list, halfoffset_cs_list = info
    # Again, it seems we cannot use warpAffine in parallel loops, so the
    # chip generation is forced to run serially and in order.
    extramargin_fpath_list = list(ut.generate(
        gen_detectchip, arg_list, ordered=True, force_serial=True))
    # Derive the corresponding probchip path for each detectchip path
    probchip_extramargin_fpath_list = [
        fpath.replace('detectchip', 'probchip')
        for fpath in extramargin_fpath_list
    ]
    return (extramargin_fpath_list, probchip_extramargin_fpath_list,
            halfoffset_cs_list)
def compute_fgweights(ibs, aid_list, config2_=None):
    """
    Compute foreground weights for the keypoints of each annotation.

    Example:
        >>> # SLOW_DOCTEST
        >>> from ibeis.algo.preproc.preproc_featweight import *  # NOQA
        >>> import ibeis
        >>> ibs = ibeis.opendb('testdb1')
        >>> aid_list = ibs.get_valid_aids()[1:2]
        >>> config2_ = None
        >>> featweight_list = compute_fgweights(ibs, aid_list)
        >>> result = np.array_str(featweight_list[0][0:3], precision=3)
        >>> print(result)
        [ 0.125  0.061  0.053]
    """
    num_tasks = len(aid_list)
    print('[preproc_featweight.compute_fgweights] Preparing to compute %d fgweights' % (num_tasks,))
    probchip_fpath_list = preproc_probchip.compute_and_write_probchip(
        ibs, aid_list, config2_=config2_)
    chipsize_list = ibs.get_annot_chip_sizes(aid_list, config2_=config2_)
    kpts_list = ibs.get_annot_kpts(aid_list, config2_=config2_)
    # Force grayscale reading of chips; missing probchips become None
    probchip_list = [
        vt.imread(fpath, grayscale=True) if exists(fpath) else None
        for fpath in probchip_fpath_list
    ]
    print('[preproc_featweight.compute_fgweights] Computing %d fgweights' % (num_tasks,))
    worker_args = zip(aid_list, kpts_list, probchip_list, chipsize_list)
    weight_gen = ut.generate(gen_featweight_worker, worker_args,
                             nTasks=num_tasks, ordered=True, freq=10)
    featweight_param_list = list(weight_gen)
    # Each worker yields (aid, weights); keep only the weight column
    featweight_list = ut.get_list_column(featweight_param_list, 1)
    print('[preproc_featweight.compute_fgweights] Done computing %d fgweights' % (num_tasks,))
    return featweight_list
def add_images_params_gen(gpath_list, **kwargs):
    """
    Generates values for add_images sql commands asynchronously.

    TEST CODE:
        from ibeis.dev.all_imports import *
        gpath_list = grabdata.get_test_gpaths(ndata=3) + ['doesnotexist.jpg']
        params_list = list(preproc_image.add_images_params_gen(gpath_list))

    <CYTH: yeilds=tup>
    cdef:
        list gpath_list
        dict kwargs
    </CYTH>
    """
    # Thin wrapper around the parallel image-info parser
    return utool.generate(parse_imageinfo, gpath_list, **kwargs)
def _test_buffered_generator_general2(bgfunc, bgargs, fgfunc,
                                      target_looptime=1.0, serial_cheat=1,
                                      buffer_size=2, show_serial=True):
    """
    Benchmark helper: time bgfunc producing results in the background while
    fgfunc consumes them in the foreground, comparing serial execution,
    ut.buffered_generator, and ut.generate.

    # We are going to generate output of bgfunc in the background while
    # fgfunc is running in the foreground. fgfunc takes results of bffunc as
    # args.
    # --- Hyperparams
    target_looptime = 1.5  # maximum time to run all loops
    """
    import utool as ut
    # Measure per-call cost of the background producer
    with ut.Timer('One* call to bgfunc') as t_bgfunc:
        results = [bgfunc(arg) for arg in bgargs]
    bgfunctime = t_bgfunc.ellapsed / len(bgargs)
    #fgfunc = ut.is_prime
    # Measure per-call cost of the foreground consumer
    with ut.Timer('One* call to fgfunc') as t_fgfunc:
        [fgfunc(x) for x in results]
    fgfunctime = t_fgfunc.ellapsed / len(bgargs)
    # compute amount of loops to run so total time is near target_looptime
    est_looptime = (bgfunctime + fgfunctime)
    _num_loops = round(target_looptime // est_looptime)
    num_data = int(_num_loops // len(bgargs))
    num_loops = int(num_data * len(bgargs))
    serial_cheat = min(serial_cheat, num_data)
    data = ut.flatten([bgargs] * num_data)
    est_tfg = fgfunctime * num_loops
    est_tbg = bgfunctime * num_loops
    est_needed_buffers = fgfunctime / bgfunctime
    print('Estimated stats' + ut.repr4(ut.dict_subset(locals(), [
        'num_loops', 'bgfunctime', 'fgfunctime', 'est_tfg', 'est_tbg',
        'serial_cheat', 'buffer_size', 'est_needed_buffers',
    ])))
    if show_serial:
        with ut.Timer('serial') as t1:
            # cheat for serial to make it go faster: run only 1/serial_cheat
            # of the data, then scale the measured time back up
            for x in map(bgfunc, data[:len(data) // serial_cheat]):
                fgfunc(x)
        t_serial = serial_cheat * t1.ellapsed
        print('...toc(\'adjusted_serial\') = %r' % (t_serial))
    with ut.Timer('ut.buffered_generator') as t2:
        gen_ = ut.buffered_generator(map(bgfunc, data),
                                     buffer_size=buffer_size)
        for x in gen_:
            fgfunc(x)
    with ut.Timer('ut.generate') as t3:
        gen_ = ut.generate(bgfunc, data, chunksize=buffer_size, quiet=1,
                           verbose=0)
        for x in gen_:
            fgfunc(x)
    # Compare theoretical vs practical efficiency
    print('\n Theoretical Results')

    def parallel_efficiency(ellapsed, est_tfg, est_tbg):
        # 100% means all background time was hidden behind foreground work
        return (1 - ((ellapsed - est_tfg) / est_tbg)) * 100

    if show_serial:
        print('Theoretical gain (serial) = %.3f%%' % (
            parallel_efficiency(t_serial, est_tfg, est_tbg),))
    print('Theoretical gain (ut.buffered_generator) = %.3f%%' % (
        parallel_efficiency(t2.ellapsed, est_tfg, est_tbg),))
    print('Theoretical gain (ut.generate) = %.2f%%' % (
        parallel_efficiency(t3.ellapsed, est_tfg, est_tbg),))
    # NOTE(review): prac_tbg depends on t_serial, so the practical section is
    # only reachable when show_serial is True
    if show_serial:
        prac_tbg = t_serial - est_tfg
        print('\n Practical Results')
        print('Practical gain (serial) = %.3f%%' % (
            parallel_efficiency(t1.ellapsed, est_tfg, prac_tbg),))
        print('Practical gain (ut.buffered_generator) = %.3f%%' % (
            parallel_efficiency(t2.ellapsed, est_tfg, prac_tbg),))
        print('Practical gain (ut.generate) = %.2f%%' % (
            parallel_efficiency(t3.ellapsed, est_tfg, prac_tbg),))
def _test_buffered_generator_general(func, args, sleepfunc,
                                     target_looptime=1.0, serial_cheat=1,
                                     argmode=False, buffer_size=2):
    """
    Benchmark helper: time func producing results in the background while
    sleepfunc runs in the foreground, comparing serial execution,
    ut.buffered_generator, and ut.generate.

    # We are going to generate output of func in the background while sleep
    # func is running in the foreground
    # --- Hyperparams
    target_looptime = 1.5  # maximum time to run all loops
    """
    import utool as ut
    #serial_cheat = 1  # approx division factor to run serial less times
    show_serial = True  # target_looptime < 10.  # 3.0
    # Measure per-call cost of the producer
    with ut.Timer('One* call to func') as t_fgfunc:
        results = [func(arg) for arg in args]
    functime = t_fgfunc.ellapsed / len(args)
    #sleepfunc = ut.is_prime
    # Measure per-call cost of the foreground work; argmode controls whether
    # sleepfunc consumes the producer's output or ignores it
    with ut.Timer('One* call to sleep func') as t_sleep:
        if argmode:
            [sleepfunc(x) for x in results]
        else:
            [sleepfunc() for x in results]
    sleeptime = t_sleep.ellapsed / len(args)
    # compute amount of loops to run so total time is near target_looptime
    _num_loops = round(target_looptime // (functime + sleeptime))
    num_data = int(_num_loops // len(args))
    num_loops = int(num_data * len(args))
    serial_cheat = min(serial_cheat, num_data)
    data = ut.flatten([args] * num_data)
    est_tsleep = sleeptime * num_loops
    est_tfunc = functime * num_loops
    est_needed_buffers = sleeptime / functime
    print('Estimated stats' + ut.repr4(ut.dict_subset(locals(), [
        'num_loops', 'functime', 'sleeptime', 'est_tsleep', 'est_tfunc',
        'serial_cheat', 'buffer_size', 'est_needed_buffers',
    ])))
    if show_serial:
        with ut.Timer('serial') as t1:
            # cheat for serial to make it go faster: run only 1/serial_cheat
            # of the data, then scale the measured time back up
            for x in map(func, data[:len(data) // serial_cheat]):
                if argmode:
                    sleepfunc(x)
                else:
                    sleepfunc()
        t_serial = serial_cheat * t1.ellapsed
        print('...toc(\'adjusted_serial\') = %r' % (t_serial))
    with ut.Timer('ut.buffered_generator') as t2:
        gen_ = ut.buffered_generator(map(func, data), buffer_size=buffer_size)
        for x in gen_:
            if argmode:
                sleepfunc(x)
            else:
                sleepfunc()
    with ut.Timer('ut.generate') as t3:
        gen_ = ut.generate(func, data, chunksize=buffer_size, quiet=1,
                           verbose=0)
        for x in gen_:
            if argmode:
                sleepfunc(x)
            else:
                sleepfunc()
    # Compare theoretical vs practical efficiency
    print('\n Theoretical Results')

    def parallel_efficiency(ellapsed, est_tsleep, est_tfunc):
        # 100% means all producer time was hidden behind foreground work
        return (1 - ((ellapsed - est_tsleep) / est_tfunc)) * 100

    if show_serial:
        print('Theoretical gain (serial) = %.3f%%' % (
            parallel_efficiency(t_serial, est_tsleep, est_tfunc),))
    print('Theoretical gain (ut.buffered_generator) = %.3f%%' % (
        parallel_efficiency(t2.ellapsed, est_tsleep, est_tfunc),))
    print('Theoretical gain (ut.generate) = %.2f%%' % (
        parallel_efficiency(t3.ellapsed, est_tsleep, est_tfunc),))
    # NOTE(review): prac_tfunc depends on t_serial, so the practical section
    # is only reachable when show_serial is True
    if show_serial:
        prac_tfunc = t_serial - est_tsleep
        print('\n Practical Results')
        print('Practical gain (serial) = %.3f%%' % (
            parallel_efficiency(t1.ellapsed, est_tsleep, prac_tfunc),))
        print('Practical gain (ut.buffered_generator) = %.3f%%' % (
            parallel_efficiency(t2.ellapsed, est_tsleep, prac_tfunc),))
        print('Practical gain (ut.generate) = %.2f%%' % (
            parallel_efficiency(t3.ellapsed, est_tsleep, prac_tfunc),))
def add_images_json(ibs, image_uri_list, image_uuid_list, image_width_list,
                    image_height_list, image_orig_name_list=None,
                    image_ext_list=None, image_time_posix_list=None,
                    image_gps_lat_list=None, image_gps_lon_list=None,
                    image_orientation_list=None, image_notes_list=None,
                    **kwargs):
    """
    REST:
        Method: POST
        URL: /api/image/json/

    Ignore:
        sudo pip install boto

    Args:
        image_uri_list (list) : list of string image uris, most likely HTTP(S)
            or S3 encoded URLs. Alternatively, this can be a list of
            dictionaries (JSON objects) that specify AWS S3 stored assets. An
            example below:

                image_uri_list = [
                    'http://domain.com/example/asset1.png',
                    '/home/example/Desktop/example/asset2.jpg',
                    's3://s3.amazon.com/example-bucket-2/asset1-in-bucket-2.tif',
                    {
                        'bucket'          : 'example-bucket-1',
                        'key'             : 'example/asset1.png',
                        'auth_domain'     : None,  # Uses localhost
                        'auth_access_id'  : None,  # Uses system default
                        'auth_secret_key' : None,  # Uses system default
                    },
                    {
                        'bucket' : 'example-bucket-1',
                        'key'    : 'example/asset2.jpg',
                        # if unspecified, auth uses localhost and system defaults
                    },
                    {
                        'bucket'          : 'example-bucket-2',
                        'key'             : 'example/asset1-in-bucket-2.tif',
                        'auth_domain'     : 's3.amazon.com',
                        'auth_access_id'  : '____________________',
                        'auth_secret_key' : '________________________________________',
                    },
                ]

            Note that you cannot specify AWS authentication access ids or
            secret keys using string uri's. For specific authentication
            methods, please use the latter list of dictionaries.

        image_uuid_list (list of str) : list of image UUIDs to be used in IBEIS IA
        image_width_list (list of int) : list of image widths
        image_height_list (list of int) : list of image heights
        image_orig_name_list (list of str): list of original image names
        image_ext_list (list of str): list of original image names
        image_time_posix_list (list of int): list of image's POSIX timestamps
        image_gps_lat_list (list of float): list of image's GPS latitude values
        image_gps_lon_list (list of float): list of image's GPS longitude values
        image_orientation_list (list of int): list of image's orientation flags
        image_notes_list (list of str) : optional list of any related notes with
            the images
        **kwargs : key-value pairs passed to the ibs.add_images() function.

    CommandLine:
        python -m ibeis.web.app --test-add_images_json

    Example:
        >>> # WEB_DOCTEST
        >>> from ibeis.control.IBEISControl import *  # NOQA
        >>> import ibeis
        >>> web_instance = ibeis.opendb(db='testdb1')
        >>> _payload = {
        >>>     'image_uri_list': [
        >>>         'https://upload.wikimedia.org/wikipedia/commons/4/49/Zebra_running_Ngorongoro.jpg',
        >>>         {
        >>>             'bucket' : 'test-asset-store',
        >>>             'key'    : 'caribwhale/20130903-JAC-0002.JPG',
        >>>         },
        >>>     ],
        >>>     'image_uuid_list': [
        >>>         uuid.UUID('7fea8101-7dec-44e3-bf5d-b8287fd231e2'),
        >>>         uuid.UUID('c081119a-e08e-4863-a710-3210171d27d6'),
        >>>     ],
        >>>     'image_width_list': [
        >>>         1992,
        >>>         1194,
        >>>     ],
        >>>     'image_height_list': [
        >>>         1328,
        >>>         401,
        >>>     ],
        >>> }
        >>> gid_list = ibeis.web.app.add_images_json(web_instance, **_payload)
        >>> print(gid_list)
        >>> print(web_instance.get_image_uuids(gid_list))
        >>> print(web_instance.get_image_uris(gid_list))
        >>> print(web_instance.get_image_paths(gid_list))
        >>> print(web_instance.get_image_uris_original(gid_list))
    """
    def _get_standard_ext(gpath):
        # Normalize the file extension ('.jpeg' -> '.jpg')
        ext = splitext(gpath)[1].lower()
        return '.jpg' if ext == '.jpeg' else ext

    def _parse_imageinfo(index):
        # Build the parameter tuple for image `index` from the parallel lists
        def _resolve_uri():
            list_ = image_uri_list
            if list_ is None or index >= len(list_) or list_[index] is None:
                raise ValueError('Must specify all required fields')
            value = list_[index]
            if isinstance(value, dict):
                # S3 assets arrive as dicts; encode them into a string uri
                value = ut.s3_dict_encode_to_str(value)
            return value

        def _resolve(list_, default='', assert_=False):
            # Fetch list_[index], falling back to `default` when the list or
            # entry is missing (or raising when the field is required)
            if list_ is None or index >= len(list_) or list_[index] is None:
                if assert_:
                    raise ValueError('Must specify all required fields')
                return default
            return list_[index]

        uri = _resolve_uri()
        orig_gname = basename(uri)
        ext = _get_standard_ext(uri)
        uuid_ = _resolve(image_uuid_list, assert_=True)
        if isinstance(uuid_, six.string_types):
            uuid_ = uuid.UUID(uuid_)
        param_tup = (
            uuid_,
            uri,
            uri,
            _resolve(image_orig_name_list, default=orig_gname),
            _resolve(image_ext_list, default=ext),
            int(_resolve(image_width_list, assert_=True)),
            int(_resolve(image_height_list, assert_=True)),
            int(_resolve(image_time_posix_list, default=-1)),
            float(_resolve(image_gps_lat_list, default=-1.0)),
            float(_resolve(image_gps_lon_list, default=-1.0)),
            int(_resolve(image_orientation_list, default=0)),
            _resolve(image_notes_list),
        )
        return param_tup

    # TODO: FIX ME SO THAT WE DON'T HAVE TO LOCALIZE EVERYTHING
    kwargs['auto_localize'] = kwargs.get('auto_localize', True)
    kwargs['sanitize'] = kwargs.get('sanitize', False)

    index_list = range(len(image_uri_list))
    params_gen = ut.generate(_parse_imageinfo, index_list, adjust=True,
                             force_serial=True, **kwargs)
    params_gen = list(params_gen)
    # NOTE(review): _[0] is the uuid element of param_tup, not a path —
    # confirm ibs.add_images accepts uuids in gpath_list when params_list is
    # supplied
    gpath_list = [ _[0] for _ in params_gen ]
    gid_list = ibs.add_images(gpath_list, params_list=params_gen, **kwargs)  # NOQA
    # return gid_list
    image_uuid_list = ibs.get_image_uuids(gid_list)
    return image_uuid_list
def chip_tester():
    """
    Interactive scratchpad for comparing chip equalization configs and their
    effect on keypoint/feature counts.

    NOTE(review): this is exploratory code, not a callable function as-is:
    `measures` is used (x.fit below) before it is assigned, `z1`/`z2` are used
    before they are defined at the bottom, and `sklearn` is referenced without
    a visible import. Presumably meant to be run line-by-line in IPython.
    """
    import plottool as pt
    pt.ensure_pylab_qt4()
    from ibeis.core_annots import *  # NOQA
    import ibeis
    defaultdb = 'GZ_ALL'
    ibs = ibeis.opendb(defaultdb=defaultdb)
    aid_list = ibs.get_valid_aids()
    depc = ibs.depc_annot
    # Chips under three equalization configs
    chips_orig = depc.get_property('chips', aid_list, 'img', config={})
    chips_aeq = depc.get_property('chips', aid_list, 'img', config={'adapteq': True})
    chips_heq = depc.get_property('chips', aid_list, 'img', config={'histeq': True})
    import pyhesaff
    nkpts_list = np.array(list(ut.generate(pyhesaff.detect_num_kpts_in_image,
                                           chips_orig,
                                           force_serial=ibs.force_serial)))
    nkpts_list = np.array(nkpts_list)
    # Feature counts under the same three configs
    nfeats_orig = np.array(ibs.depc_annot.get('feat', aid_list, 'num_feats'))
    nfeats_hteq = np.array(ibs.depc_annot.get('feat', aid_list, 'num_feats', config={'histeq': True}))
    nfeats_ateq = np.array(ibs.depc_annot.get('feat', aid_list, 'num_feats', config={'adapteq': True}))
    # Each assignment overwrites the previous sortx (scratch exploration)
    sortx = np.array(nfeats_orig).argsort()
    sortx = np.array(nfeats_hteq).argsort()
    sortx = np.array(nfeats_ateq).argsort()
    aids = ut.take(aid_list, sortx)
    chips = chips_orig
    chips_bad = ut.take(chips, sortx)
    chips_good = ut.take(chips, sortx[::-1])
    import ibeis.viz.interact.interact_chip
    ibeis.viz.interact.interact_chip.interact_multichips(ibs, aids)
    iteract_obj = pt.interact_multi_image.MultiImageInteraction(chips_bad, nPerPage=15)
    iteract_obj.start()
    iteract_obj = pt.interact_multi_image.MultiImageInteraction(chips_good, nPerPage=15)
    iteract_obj.start()
    # NOTE(review): `measures` is not defined until further below
    x = sklearn.cluster.KMeans(2)
    x.fit(np.nan_to_num(measures))
    import vtool.quality_classifier
    from vtool.quality_classifier import contrast_measures
    chips128 = depc.get_property('chips', aid_list, 'img', config={'dim_size': 256})
    gray_chips = [vt.convert_colorspace(x, 'GRAY') for x in ut.ProgIter(chips128)]
    measures = list(ut.generate(contrast_measures, gray_chips,
                                force_serial=ibs.force_serial))
    measures = np.array(measures)
    measures = np.nan_to_num(measures)
    y = measures.T[3]
    sortx = y.argsort()
    ys = y.take(sortx)
    # Project contrast measures to 1-D and compare against whitened counts
    pca = sklearn.decomposition.PCA(1)
    pca.fit(measures)
    pca_measure = pca.transform(measures)
    nfeats_white = (nfeats_orig - nfeats_orig.mean()) / nfeats_orig.std()
    pca_white = (pca_measure - pca_measure.mean()) / pca_measure.std()
    sortx = nfeats_white.argsort()
    pt.plt.plot(pca_white[sortx], 'x')
    pt.plt.plot(nfeats_white[sortx], '.')
    pyhesaff.detect_feats_in_image
    # Can contrast measures predict "enough features"?
    svc = sklearn.svm.LinearSVC()
    svc.fit(measures, nfeats_orig > 500)
    svc.predict(measures) == (nfeats_orig > 500)
    svr = sklearn.svm.LinearSVR()
    svr.fit(measures, nfeats_orig)
    svr.predict(measures)
    # NOTE(review): z1/z2 are used here but assigned below
    depc['feat'].get_config_history(z1)
    depc['feat'].get_config_history(z2)
    pt.plt.plot(nfeats_hteq[sortx], 'x')
    pt.plt.plot(nfeats_orig[sortx], '.')
    pt.plt.plot(nfeats_ateq[sortx], 'o')
    z1 = ibs.depc_annot.get_rowids('feat', aid_list, config={'histeq': True})
    z2 = ibs.depc_annot.get_rowids('feat', aid_list)
    # Different configs must map to disjoint cached feature rowids
    assert len(set(z1).intersection(z2)) == 0
def add_images_json(ibs, image_uri_list, image_uuid_list, image_width_list,
                    image_height_list, image_orig_name_list=None,
                    image_ext_list=None, image_time_posix_list=None,
                    image_gps_lat_list=None, image_gps_lon_list=None,
                    image_orientation_list=None, image_notes_list=None,
                    **kwargs):
    """
    REST:
        Method: POST
        URL: /api/image/json/

    Ignore:
        sudo pip install boto

    Args:
        image_uri_list (list) : list of string image uris, most likely HTTP(S)
            or S3 encoded URLs. Alternatively, this can be a list of
            dictionaries (JSON objects) that specify AWS S3 stored assets. An
            example below:

                image_uri_list = [
                    'http://domain.com/example/asset1.png',
                    '/home/example/Desktop/example/asset2.jpg',
                    's3://s3.amazon.com/example-bucket-2/asset1-in-bucket-2.tif',
                    {
                        'bucket'          : 'example-bucket-1',
                        'key'             : 'example/asset1.png',
                        'auth_domain'     : None,  # Uses localhost
                        'auth_access_id'  : None,  # Uses system default
                        'auth_secret_key' : None,  # Uses system default
                    },
                    {
                        'bucket' : 'example-bucket-1',
                        'key'    : 'example/asset2.jpg',
                        # if unspecified, auth uses localhost and system defaults
                    },
                    {
                        'bucket'          : 'example-bucket-2',
                        'key'             : 'example/asset1-in-bucket-2.tif',
                        'auth_domain'     : 's3.amazon.com',
                        'auth_access_id'  : '____________________',
                        'auth_secret_key' : '________________________________________',
                    },
                ]

            Note that you cannot specify AWS authentication access ids or
            secret keys using string uri's. For specific authentication
            methods, please use the latter list of dictionaries.

        image_uuid_list (list of str) : list of image UUIDs to be used in IBEIS IA
        image_width_list (list of int) : list of image widths
        image_height_list (list of int) : list of image heights
        image_orig_name_list (list of str): list of original image names
        image_ext_list (list of str): list of original image names
        image_time_posix_list (list of int): list of image's POSIX timestamps
        image_gps_lat_list (list of float): list of image's GPS latitude values
        image_gps_lon_list (list of float): list of image's GPS longitude values
        image_orientation_list (list of int): list of image's orientation flags
        image_notes_list (list of str) : optional list of any related notes with
            the images
        **kwargs : key-value pairs passed to the ibs.add_images() function.

    CommandLine:
        python -m ibeis.web.app --test-add_images_json

    Example:
        >>> # WEB_DOCTEST
        >>> from ibeis.control.IBEISControl import *  # NOQA
        >>> import ibeis
        >>> web_instance = ibeis.opendb(db='testdb1')
        >>> _payload = {
        >>>     'image_uri_list': [
        >>>         'https://upload.wikimedia.org/wikipedia/commons/4/49/Zebra_running_Ngorongoro.jpg',
        >>>         {
        >>>             'bucket' : 'test-asset-store',
        >>>             'key'    : 'caribwhale/20130903-JAC-0002.JPG',
        >>>         },
        >>>     ],
        >>>     'image_uuid_list': [
        >>>         uuid.UUID('7fea8101-7dec-44e3-bf5d-b8287fd231e2'),
        >>>         uuid.UUID('c081119a-e08e-4863-a710-3210171d27d6'),
        >>>     ],
        >>>     'image_width_list': [
        >>>         1992,
        >>>         1194,
        >>>     ],
        >>>     'image_height_list': [
        >>>         1328,
        >>>         401,
        >>>     ],
        >>> }
        >>> gid_list = ibeis.web.app.add_images_json(web_instance, **_payload)
        >>> print(gid_list)
        >>> print(web_instance.get_image_uuids(gid_list))
        >>> print(web_instance.get_image_uris(gid_list))
        >>> print(web_instance.get_image_paths(gid_list))
        >>> print(web_instance.get_image_uris_original(gid_list))
    """
    def _get_standard_ext(gpath):
        # Normalize the file extension ('.jpeg' -> '.jpg')
        ext = splitext(gpath)[1].lower()
        return '.jpg' if ext == '.jpeg' else ext

    def _parse_imageinfo(index):
        # Build the parameter tuple for image `index` from the parallel lists
        def _resolve_uri():
            list_ = image_uri_list
            if list_ is None or index >= len(list_) or list_[index] is None:
                raise ValueError('Must specify all required fields')
            value = list_[index]
            if isinstance(value, dict):
                # S3 assets arrive as dicts; encode them into a string uri
                value = ut.s3_dict_encode_to_str(value)
            return value

        def _resolve(list_, default='', assert_=False):
            # Fetch list_[index], falling back to `default` when the list or
            # entry is missing (or raising when the field is required)
            if list_ is None or index >= len(list_) or list_[index] is None:
                if assert_:
                    raise ValueError('Must specify all required fields')
                return default
            return list_[index]

        uri = _resolve_uri()
        orig_gname = basename(uri)
        ext = _get_standard_ext(uri)
        uuid_ = _resolve(image_uuid_list, assert_=True)
        if isinstance(uuid_, six.string_types):
            uuid_ = uuid.UUID(uuid_)
        param_tup = (
            uuid_,
            uri,
            uri,
            _resolve(image_orig_name_list, default=orig_gname),
            _resolve(image_ext_list, default=ext),
            int(_resolve(image_width_list, assert_=True)),
            int(_resolve(image_height_list, assert_=True)),
            int(_resolve(image_time_posix_list, default=-1)),
            float(_resolve(image_gps_lat_list, default=-1.0)),
            float(_resolve(image_gps_lon_list, default=-1.0)),
            int(_resolve(image_orientation_list, default=0)),
            _resolve(image_notes_list),
        )
        return param_tup

    # TODO: FIX ME SO THAT WE DON'T HAVE TO LOCALIZE EVERYTHING
    kwargs['auto_localize'] = kwargs.get('auto_localize', True)
    kwargs['sanitize'] = kwargs.get('sanitize', False)

    index_list = range(len(image_uri_list))
    params_gen = ut.generate(_parse_imageinfo, index_list, adjust=True,
                             force_serial=True, **kwargs)
    params_gen = list(params_gen)
    # NOTE(review): _[0] is the uuid element of param_tup, not a path —
    # confirm ibs.add_images accepts uuids in gpath_list when params_list is
    # supplied
    gpath_list = [_[0] for _ in params_gen]
    gid_list = ibs.add_images(gpath_list, params_list=params_gen, **kwargs)  # NOQA
    # return gid_list
    image_uuid_list = ibs.get_image_uuids(gid_list)
    return image_uuid_list