def batch_query(em, force_recomp=False, test_cxs=None):
    '''Runs each test_cxs as a query. If test_cxs is None, then all queries are run'''
    # TODO: Fix up the VM dependencies
    vm, iom, am, cm = em.hs.get_managers('vm', 'iom', 'am', 'cm')
    # Compute the matches
    qm = vm.hs.qm
    vm.sample_train_set()
    vm.build_model(force_recomp=force_recomp)
    if test_cxs is None:
        test_cxs = vm.get_train_cx()
    logmsg('Building matching graph. This may take awhile')
    depends = ['chiprep', 'preproc', 'model', 'query']
    algo_suffix = am.get_algo_suffix(depends)
    samp_suffix = vm.get_samp_suffix()
    result_dpath = iom.ensure_directory(iom.get_temp_fpath('raw_results'))
    rr_fmtstr_cid = os.path.join(result_dpath, 'rr_cid%07d' + samp_suffix + algo_suffix + '.pkl')
    # Find the queries which still need to be run
    unsaved_cxs = []
    for cx in iter(test_cxs):
        cid = cm.cx2_cid[cx]
        rr_fpath = rr_fmtstr_cid % cid
        if not os.path.exists(rr_fpath):
            unsaved_cxs.append(cx)
    # Run the unsaved queries
    total = len(unsaved_cxs)
    for count, cx in enumerate(unsaved_cxs):
        logmsg('Query %d/%d' % (count + 1, total))
        em.run_and_save_query(cx, rr_fmtstr_cid)
    # Read each query result
    cx2_rr = alloc_lists(test_cxs.max() + 1)
    total = len(test_cxs)
    for count, cx in enumerate(test_cxs):
        logmsg('Loading Result %d/%d' % (count + 1, total))
        cid = cm.cx2_cid[cx]
        rr_fpath = rr_fmtstr_cid % cid
        if not os.path.exists(rr_fpath):
            logwarn('Result does not exist for CID=%d' % cid)
            continue  # skip missing results instead of crashing on open()
        rr_file = open(rr_fpath, 'rb')
        try:
            rr = cPickle.load(rr_file)
        except EOFError:
            rr_file.close()
            os.remove(rr_fpath)
            logwarn('Result was corrupted for CID=%d' % cid)
            continue  # a corrupted result cannot be loaded; skip it
        rr_file.close()
        # Drop the heavyweight fields before caching the result in memory
        rr.cx2_cscore_ = []
        rr.cx2_fs_ = []
        rr.qfdsc = []
        rr.qfpts = []
        cx2_rr[cx] = rr
    return cx2_rr
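# Minimal usage sketch (hypothetical driver; assumes an experiment manager
# `em` already wired to an open HotSpotter instance):
#
#   cx2_rr = batch_query(em, force_recomp=False)  # query every training chip
#   rr = cx2_rr[some_cx]                          # per-chip result, or [] if absent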
def delete_model(vm):
    logdbg('Deleting Sample Index')
    if vm.flann is not None:
        try:
            vm.flann.delete_index()
            vm.flann = None
        except WindowsError:  # raised by FLANN's DLL teardown on Windows
            logwarn('WARNING: FLANN is not deleting correctly')
    vm.reset()
def remove_file(iom, fpath):
    if iom.dummy_delete:
        logdbg('DummyDelete: %s' % fpath)
        return False
    logdbg('Deleting: %s' % fpath)
    try:
        os.remove(fpath)
    except OSError as e:
        logwarn('OSError: %s,\n Could not delete %s' % (str(e), fpath))
        return False
    return True
def bilateral_filter(img):
    try:
        import skimage.filter
        img_uint8 = img
        img_float = float32(img_uint8) / 255
        # mode: how points outside the input boundaries are filled
        # ('constant', 'nearest', 'reflect' or 'wrap'); cval is only used
        # when mode='constant', so it must be a number, not a string.
        img_bilat = skimage.filter.denoise_bilateral(img_float,
                                                     win_size=20,
                                                     sigma_range=1.6,
                                                     sigma_spatial=1.6,
                                                     bins=256,
                                                     mode='reflect',
                                                     cval=0)
        return img_bilat
    except Exception as ex:
        logdbg('Scikits not found: %s' % str(ex))
        logwarn('Scikits not found: %s' % str(ex))
        return img
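# Minimal usage sketch (assumes `chip` is a uint8 grayscale image obtained
# elsewhere; the accessor name is illustrative only):
#
#   chip = cm.cx2_chip(cx)             # hypothetical chip accessor
#   smoothed = bilateral_filter(chip)  # float image in [0, 1] on success,
#                                      # or the untouched input on failure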
def load_result(rr, rr_dpath, rr_suffix):
    'Loads the result from the given database'
    rr_fpath = rr.rr_fpath(rr_dpath, rr_suffix)
    try:
        npz = np.load(rr_fpath)
        for _key in npz.files:
            if _key in ['qcx', 'qcid', 'qnid', 'dbid', 'qdbid']:
                # Numpy saving is weird; scalar fields come back as
                # 0-d arrays and have to be cast back to Python types.
                rr.__dict__[_key] = npz[_key].tolist()
            else:
                rr.__dict__[_key] = npz[_key]
    except Exception as ex:
        os.remove(rr_fpath)
        logwarn('Load Result Exception : ' + str(ex) +
                '\nResult was corrupted for CID=%d' % rr.qcid)
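# Counterpart sketch: since the loader restores every key in npz.files onto
# rr.__dict__, a compatible writer (the actual save path lives elsewhere in
# the Experiment code) would look like:
#
#   np.savez(rr_fpath, **rr.__dict__)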
def load_features(cm, _cxs=None, force_recomp=False):
    if _cxs is None:
        cxs = cm.get_valid_cxs()
    elif type(_cxs) is types.ListType:
        cxs = np.array(_cxs)
    elif type(_cxs) in [types.IntType, types.LongType, np.uint32]:
        cxs = np.array([_cxs])
    else:
        cxs = _cxs
    count_feat = 0
    is_dirty = np.bitwise_or(cm.cx2_dirty_bit[cxs], force_recomp)
    num_samp = cxs.size
    num_dirty = np.sum(is_dirty)
    # HACKS: scalar inputs can collapse these to 0-d arrays
    if not np.iterable(is_dirty):
        is_dirty = np.array([is_dirty])
    if not np.iterable(cxs):
        cxs = np.array([cxs])
    load_cx = cxs[is_dirty]
    num_clean = num_samp - num_dirty
    #logdbg('Loading Features: Dirty=%d ; #Clean=%d' % (num_dirty, num_clean))
    if num_dirty == 0:
        return
    logio('Loading %d Feature Reps' % num_dirty)
    am = cm.hs.am
    for cx in iter(load_cx):
        cid = cm.cx2_cid[cx]
        if cid <= 0:
            logwarn('WARNING: IX=' + str(cx) + ' is invalid')
            continue
        chiprep_fpath = cm.hs.iom.get_chiprep_fpath(cid)
        # Ensure that the features exist
        if force_recomp or not os.path.exists(chiprep_fpath):
            logio('Computing and saving features of cid=' + str(cid))
            hotspotter.ChipFunctions.precompute_chipreps(cm.hs, [cx],
                                                         num_procs=1,
                                                         force_recompute=force_recomp)
        # Load the features
        logdbg('Loading features in ' + chiprep_fpath)
        npz = np.load(chiprep_fpath)
        fpts = npz['arr_0']
        fdsc = npz['arr_1']
        npz.close()
        cm.cx2_fpts[cx] = fpts
        cm.cx2_fdsc[cx] = fdsc
        cm.cx2_dirty_bit[cx] = False
        count_feat += len(fpts)
    logdbg('* Loaded ' + str(count_feat) + ' keypoints and descriptors')
    return True
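# The loader above reads chiprep .npz files by positional key, so the writer
# must save keypoints first and descriptors second. A sketch of the expected
# counterpart (the real writer lives in hotspotter.ChipFunctions):
#
#   np.savez(chiprep_fpath, fpts, fdsc)  # => npz['arr_0'], npz['arr_1']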
def _check_altfname(iom, alt_names=None):
    'Checks for a legacy data table'
    alt_dirs = [iom.get_internal_dpath(),
                iom.hs.db_dpath,
                join(iom.hs.db_dpath, 'data'),
                join(iom.hs.db_dpath, 'data', '..', 'data', '..')]
    for adir in iter(alt_dirs):
        for aname in iter(alt_names):
            alt_fpath = normpath(join(adir, aname))
            logdbg('Checking: ' + alt_fpath)
            if exists(alt_fpath):
                logwarn('Using Alternative Datatable ' + alt_fpath)
                timestamp = str(time.time())
                backup_fpath = normpath(alt_fpath + '.' + timestamp + '.bak')
                logwarn('Creating Backup: ' + backup_fpath)
                shutil.copyfile(alt_fpath, backup_fpath)
                return alt_fpath
    if iom.hs.db_dpath.find(iom.internal_dname) >= 0:
        # Disallow HotSpotter directories inside HotSpotter directories
        new_db_path = iom.hs.db_dpath[0:iom.hs.db_dpath.find(iom.internal_dname)]
        logwarn('Changing this data dir ' + iom.hs.db_dpath)
        logwarn('To that data dir ' + new_db_path)
        iom.hs.db_dpath = new_db_path
    return 'CSV_Name_not_found'
def end_draw(dm):
    #gray()
    logdbg('Finalizing Draw with ' + str(len(dm.ax_list)) + ' axes')
    fig = dm.get_figure()
    #fig.subplots_adjust(hspace=0.2, wspace=0.2)
    #fig.tight_layout(pad=.3, h_pad=None, w_pad=None)
    #fig.tight_layout()
    if dm.draw_prefs.in_qtc_bit:
        try:
            from IPython.core.display import display  # IPython.back.display does not exist
            display(fig)
        except Exception:
            logwarn('Cannot Draw in QTConsole')
    fig.show()
    dm.hs.uim.redraw_gui()
    fig.canvas.draw()
def load_tables(iom):
    logmsg('Loading data tables in ' + iom.hs.db_dpath)
    # The original condition tested the image table twice; the chip table is
    # the third table loaded below, so that is presumably what was intended.
    if not (exists(iom.get_image_table_fpath()) and
            exists(iom.get_name_table_fpath()) and
            exists(iom.get_chip_table_fpath())):
        if exists(iom.get_oxford_gt_dpath()):
            logmsg('You have selected an Oxford style groundtruth')
            iom.load_oxford_gt()
            logmsg('Successfully loaded Oxford style groundtruth')
            sys.stdout.flush()
            return
        logwarn('Trying to load a Legacy Database')
    iom.load_image_table()
    iom.load_name_table()
    iom.load_chip_table()
    logmsg('Done loading data tables')
    sys.stdout.flush()
def adapt_histeq(img):
    try:
        from skimage import exposure
        # input uint8, output uint16
        img_uint8 = img
        img_uint16 = uint16(img) * 2**8
        img_adapteq_uint16 = exposure.equalize_adapthist(img_uint16,
                                                         ntiles_x=8,
                                                         ntiles_y=8,
                                                         clip_limit=0.01,
                                                         nbins=256)
        # Crop the 5px tile border in both dimensions (the original
        # [5:-5][5:-5] indexed the rows twice) and scale back to uint8
        img_adapteq_cropped_uint8 = uint8(img_adapteq_uint16[5:-5, 5:-5] / uint16(2)**8)
        return img_adapteq_cropped_uint8
    except Exception as ex:
        logdbg('Scikits not found: %s' % str(ex))
        logwarn('Scikits not found: %s' % str(ex))
        return img
def is_valid_db_dpath(hs, db_dpath):
    'Checks to see if database conforms to expected conventions'
    if not os.path.exists(db_dpath):
        logwarn('db_dpath "%s" does not exist' % str(db_dpath))
        return False
    db_dpath_files = os.listdir(db_dpath)
    if hs.iom.internal_dname in db_dpath_files:
        logmsg('Opening a HotSpotter database: ' + db_dpath)
    elif 'images' in db_dpath_files or 'data' in db_dpath_files:
        logmsg('Opening a StripSpotter database: ' + db_dpath)
    elif len(db_dpath_files) == 0:
        logmsg('Creating a new database: ' + db_dpath)
    else:
        logwarn('Unknown database type: ' + db_dpath)
        logdbg('Files in dir: ' + str(db_dpath_files))
        return False
    return True
def _scaled_size(cm, cx, dtype=float, rotated=False):
    '''Returns the ChipSpace size of cx. Rotation is only applied when
    rotated=True. Depends on the current algorithm settings.
    dtype specifies the precision of the return type.'''
    # Compute Unrotated Chip Space
    # Get raw size and target size
    (_, _, rw, rh) = cm.cx2_roi[cx]
    target_diag_pxls = cm.hs.am.algo_prefs.preproc.sqrt_num_pxls
    # HACK: Double the size like Lowe; instead of normalizing
    if target_diag_pxls == -1:
        current_num_diag_pxls = np.sqrt(rw**2 + rh**2)
        target_diag_pxls = current_num_diag_pxls * 2  # max(, 5000)
    ar = np.float(rw) / np.float(rh)  # aspect ratio
    if ar > 4 or ar < .25:
        logwarn('Aspect ratio for cx=%d %.2f may be too extreme' % (cx, ar))
    # Compute the unoriented scaled chip's width and height
    ucw = np.sqrt(ar**2 * target_diag_pxls**2 / (ar**2 + 1))
    uch = ucw / ar
    # Rotate Unrotated Chip Space into Rotated Chip Space
    if rotated:
        theta = cm.cx2_theta[cx]
        rot = np.array([[np.cos(theta), -np.sin(theta)],
                        [np.sin(theta),  np.cos(theta)]], dtype=np.float)
        # Extent of Unrotated Chip Space, with the center shifted to the origin
        pts_00 = np.array([(0, 0), (ucw, 0), (ucw, uch), (0, uch)]) - np.array((ucw, uch)) / 2
        rot_pts = pts_00.dot(rot)
        xymin = rot_pts.min(0)
        xymax = rot_pts.max(0)
        # Floating point Rotated Chip w/h
        cw, ch = xymax - xymin
    else:
        # Floating point Unrotated Chip w/h
        cw, ch = ucw, uch
    # Convert to the specified dtype at the end
    if dtype is np.float:
        return cw, ch
    elif np.dtype(dtype).kind == 'f':
        return dtype(cw), dtype(ch)
    else:
        return dtype(round(cw)), dtype(round(ch))
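# Sanity check (illustrative) of the diagonal-preserving formulas above:
# with aspect ratio ar = rw/rh and target diagonal d,
#   ucw = sqrt(ar^2 * d^2 / (ar^2 + 1)),  uch = ucw / ar
# gives hypot(ucw, uch) == d while preserving ucw/uch == ar.
#
#   ar, d = 2.0, 1000.0
#   ucw = np.sqrt(ar**2 * d**2 / (ar**2 + 1))
#   uch = ucw / ar
#   assert np.allclose(np.hypot(ucw, uch), d)
#   assert np.allclose(ucw / uch, ar)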
def smartget_db_dpath(hs, db_dpath):
    '''Performs a smart update of the db_dpath.
    Tries a number of options to get it right.
    None = Read from preferences
    ''   = Prompt the user for a database'''
    if db_dpath is None:  # If requested to read prefs
        db_dpath = str(hs.core_prefs.database_dpath)
    if db_dpath in [None, 'None'] or not os.path.exists(db_dpath):  # Check validity
        logwarn('db_dpath=' + repr(db_dpath) + ' is invalid')
        db_dpath = ''
    if db_dpath == '':  # Prompt the user. TODO: Move this to Facade/UIManager
        logmsg('what database should I open?')
        try:
            db_dpath = hs.uim.select_database()
        except Exception:
            logerr(' Was unable to prompt user with QT')
    return db_dpath
def external_feature_computers(am, chip):
    'Write chip ; call extern executable ; read output ; return (kpts, desc)'
    logdbg('Calling external kpt detector')
    iom = am.hs.iom
    chip = Image.fromarray(chip)
    tmp_chip_fpath = iom.get_temp_fpath('tmp.ppm')
    chip.save(tmp_chip_fpath, 'PPM')
    perdoch_external = ['heshesaff']
    mikolajczyk_external = ['heslapaff', 'dense']
    if am.algo_prefs.chiprep.kpts_detector in perdoch_external:
        exename = iom.get_hesaff_exec()
        outname = tmp_chip_fpath + '.hesaff.sift'
        args = '"' + tmp_chip_fpath + '"'
    elif am.algo_prefs.chiprep.kpts_detector in mikolajczyk_external:
        exename = iom.get_inria_exec()
        feature_name = am.algo_prefs.chiprep.kpts_detector
        if feature_name == 'heslapaff':
            feature_name = 'hesaff'
            suffix = 'hesaff'
        if feature_name == 'dense':
            feature_name = feature_name + ' 6 6'
            suffix = 'dense'
        outname = tmp_chip_fpath + '.' + suffix + '.sift'
        args = '-' + feature_name + ' -sift -i "' + tmp_chip_fpath + '"'
    else:
        logerr('Method %r + %r is invalid in extern_detect_kpts.m'
               % (am.algo_prefs.chiprep.kpts_detector,
                  am.algo_prefs.chiprep.kpts_extractor))
    cmd = exename + ' ' + args
    logdbg('External Executing: %r ' % cmd)
    try:
        proc = subprocess.Popen(cmd, shell=True,
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE)
        logdbg('External Execution did not throw an error')
        (out, err) = proc.communicate()
        logdbg(str(out) + ' ' + str(err))
        if proc.returncode != 0:
            logerr('Failed to execute ' + cmd + '\n OUTPUT: ' + out)
        if not os.path.exists(outname):
            logerr('The output file does not exist: ' + outname)
        logdbg('External Output:\n' + out[:-1])
    except Exception as ex:
        logwarn('An Exception occurred while calling the keypoint detector: ' + str(ex))
        try:
            ret2 = os.system(cmd)
            if ret2 != 0:
                logerr(str(ex) + '\nThe backup keypoint detector did not work either!')
        except Exception as ex2:
            logerr(str(ex2))
    fid = open(outname, 'r')
    ndims = int(fid.readline())
    nkpts = int(fid.readline())
    if ndims != 128:
        raise Exception('These are not SIFT descriptors')
    kpts = np.zeros((nkpts, 5), dtype=np.float32)
    desc = np.zeros((nkpts, ndims), dtype=np.uint8)
    lines = fid.readlines()
    # SIFT descriptors are computed with a radius of r = 3*np.sqrt(3*s)
    #   s = (det A_i) ^ (-1/2)  OR  s = sqrtm(inv(det(A_i)))
    for i in range(nkpts):
        nums = lines[i].split(' ')
        kpts[i, :] = np.array(map(lambda _: float(_), nums[0:5]), dtype=np.float32)
        desc[i, :] = np.array(map(lambda _: np.uint8(_), nums[5:]), dtype=np.uint8)
    fid.close()
    return (kpts, desc)
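# The parser above expects the Oxford/INRIA affine keypoint file layout
# emitted by both the heshesaff and Mikolajczyk binaries:
#   line 1: descriptor dimensionality (128 for SIFT)
#   line 2: number of keypoints
#   then one keypoint per line: x y a b c  d_0 d_1 ... d_127
# where (a, b, c) parameterize the ellipse a*x^2 + 2*b*x*y + c*y^2 = 1
# around the keypoint center.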
def load_csv_line(cm, csv_data, csv_headers):
    if csv_headers is None:
        csv_headers = cm.default_fields
    num_unspecified = len(csv_headers) - len(csv_data)
    if num_unspecified != 0:
        csv_data += ['' for _ in xrange(num_unspecified)]
        unspecified_type = ['data', 'headers'][num_unspecified > 0]
        logwarn(('\n\nIn chip_file: %d unspecified %s\n' +
                 'csv_headers=%r\n' +
                 'csv_data=%r\n\n')
                % (abs(num_unspecified), unspecified_type, csv_headers, csv_data))
    # Build field name -> field value map
    dmap = {k: v for (k, v) in zip(csv_headers, csv_data)}
    if cm.hs.core_prefs.legacy_bit:
        # Legacy: Be Backwards Compatible
        if 'imgindex' in dmap.keys():
            logwarn('Found imgindex')
            imgindex = int(dmap['imgindex'])
            gname = 'img-%07d.jpg' % imgindex
            cm.hs.gm.add_img(int(imgindex), gname, False)
            dmap['gid'] = imgindex
            dmap['cid'] = imgindex
            del dmap['imgindex']
        if 'animal_name' in dmap.keys():
            logwarn('Found animal_name')
            dmap['nid'] = cm.hs.nm.add_name(-1, dmap['animal_name'])
            del dmap['animal_name']
        if 'instance_id' in dmap.keys():
            dmap['cid'] = dmap['instance_id']
            del dmap['instance_id']
        if 'image_id' in dmap.keys():
            dmap['gid'] = dmap['image_id']
            del dmap['image_id']
        if 'name_id' in dmap.keys():
            dmap['nid'] = dmap['name_id']
            del dmap['name_id']
    # Read IDs
    cid = int(dmap['cid']); del dmap['cid']
    gid = int(dmap['gid']); del dmap['gid']
    nid = int(dmap['nid']); del dmap['nid']
    # Read Theta
    try:
        theta = np.float32(dmap['theta'])
        del dmap['theta']
    except KeyError:
        theta = 0
    # Read ROI: strip brackets, collapse runs of spaces, then parse ints
    roi_str = re.sub(' +', ' ', dmap['roi'].replace(']', '').replace('[', '')).strip(' ').rstrip()
    roi = map(lambda x: int(round(float(x))), roi_str.split(' '))
    del dmap['roi']
    # Read User Props: whatever is left in dmap
    props = dmap
    nx = cm.hs.nm.nid2_nx[nid]
    gx = cm.hs.gm.gid2_gx[gid]
    if gx == 0 or nx == 0 or gid == 0 or nid == 0:
        err_msg = 'Adding Chip: (cid=%d),(nid=%d,nx=%d),(gid=%d,gx=%d)' % (cid, nid, nx, gid, gx)
        err_msg += '\nChip has invalid indexes. (Maybe you deleted an image from the images directory?)'
        logwarn(err_msg)
    cm.add_chip(cid, nx, gx, roi, theta, props=props, delete_prev=False)
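# Hypothetical example row matching the non-legacy fields handled above
# (header and value names are illustrative only):
#
#   csv_headers = ['cid', 'gid', 'nid', 'roi', 'theta']
#   csv_data    = ['42', '7', '3', '[0 0 250 250]', '0.0']
#   load_csv_line(cm, csv_data, csv_headers)  # adds chip 42 to ChipManager cm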