def reindex(nnindexer, verbose=True, memtrack=None):
    r"""indexes all vectors with FLANN."""
    num_vecs = nnindexer.num_indexed
    notify_num = 1e6
    verbose_ = ut.VERYVERBOSE or verbose or (not ut.QUIET and num_vecs > notify_num)
    if verbose_:
        logger.info(
            '[nnindex] ...building kdtree over %d points (this may take a sec).'
            % num_vecs)
        tt = ut.tic(msg='Building index')
    idx2_vec = nnindexer.idx2_vec
    flann_params = nnindexer.flann_params
    if num_vecs == 0:
        logger.info(
            'WARNING: CANNOT BUILD FLANN INDEX OVER 0 POINTS. '
            'THIS MAY BE A SIGN OF A DEEPER ISSUE')
    else:
        if memtrack is not None:
            memtrack.report('BEFORE BUILD FLANN INDEX')
        nnindexer.flann.build_index(idx2_vec, **flann_params)
        if memtrack is not None:
            memtrack.report('AFTER BUILD FLANN INDEX')
    if verbose_:
        ut.toc(tt)
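# A standalone sketch of the build/query pattern that reindex() wraps, using
# pyflann directly. Illustrative only: pyflann is assumed to be installed, and
# the utool timing helpers are replaced with time.perf_counter.
import time
import numpy as np
import pyflann

def build_and_query_sketch(num_vecs=10000, dim=128):
    rng = np.random.RandomState(0)
    vecs = rng.rand(num_vecs, dim).astype(np.float32)
    if num_vecs == 0:
        print('WARNING: cannot build a FLANN index over 0 points')
        return None
    flann = pyflann.FLANN()
    t0 = time.perf_counter()
    # a kdtree forest; these kwargs play the role of **flann_params above
    flann.build_index(vecs, algorithm='kdtree', trees=4)
    print('built index in %.3fs' % (time.perf_counter() - t0,))
    # look up the 2 nearest indexed vectors for each of 5 probe vectors
    qfx2_idx, qfx2_dist = flann.nn_index(vecs[:5], 2)
    return qfx2_idx, qfx2_dist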
def reindex(self, verbose=True):
    num_vecs = len(self.wx2_word)
    if verbose:
        print('[nnindex] ...building kdtree over %d points (this may take a sec).'
              % num_vecs)
        tt = ut.tic(msg='Building vocab index')
    if num_vecs == 0:
        print('WARNING: CANNOT BUILD FLANN INDEX OVER 0 POINTS. '
              'THIS MAY BE A SIGN OF A DEEPER ISSUE')
    else:
        self.wordflann.build_index(self.wx2_word, **self.flann_params)
    if verbose:
        ut.toc(tt)
def TEST_SQL_NUMPY():
    sqldb_fname = 'temp_test_sql_numpy.sqlite3'
    sqldb_dpath = utool.util_cplat.get_app_resource_dir('ibeis', 'testfiles')
    utool.ensuredir(sqldb_dpath)
    utool.util_path.remove_file(join(sqldb_dpath, sqldb_fname), dryrun=False)
    db = sqldbc.SQLDatabaseController(sqldb_dpath=sqldb_dpath,
                                      sqldb_fname=sqldb_fname)
    db.add_table('temp', [
        ('temp_id', 'INTEGER PRIMARY KEY'),
        ('temp_hash', 'NUMPY'),
    ])
    tt = utool.tic()
    feats_list = grab_numpy_testdata(shape=(3e3, 128), dtype=np.uint8)
    print(' * numpy.new time=%r sec' % utool.toc(tt))
    print('[TEST] insert numpy arrays')
    tt = utool.tic()
    feats_iter = ((feats,) for feats in feats_list)
    db.executemany(operation='''
        INSERT INTO temp
        (
            temp_hash
        )
        VALUES (?)
        ''', params_iter=feats_iter)
    print(' * execute insert time=%r sec' % utool.toc(tt))
    print('[TEST] save sql database')
    tt = utool.tic()
    # db.cur.commit()
    db.connection.commit()
    print(' * commit time=%r sec' % utool.toc(tt))
    print('[TEST] read from sql database')
    tt = utool.tic()
    db.cur.execute('SELECT temp_hash FROM temp', [])
    print(' * execute select time=%r sec' % utool.toc(tt))
    tt = utool.tic()
    result_list = _results_gen(db.cur)
    print(' * iter results time=%r sec' % utool.toc(tt))
    print(' * memory(result_list) = %s'
          % utool.byte_str2(utool.get_object_size(result_list)))
    del result_list
    # print('[TEST] result_list=%r' % result_list)
    print('[TEST] dump sql database')
    tt = utool.tic()
    db.dump('temp.dump.txt')
    print(' * dump time=%r sec' % utool.toc(tt))
    # with open('temp.dump.txt') as file_:
    #     print(file_.read())
    return locals()
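# The custom 'NUMPY' column type above implies the controller registers
# sqlite3 adapters for numpy arrays. A plausible standalone sketch of that
# mechanism with the stdlib (not the actual SQLDatabaseController code):
import io
import sqlite3
import numpy as np

def _adapt_numpy(arr):
    # serialize an ndarray to a BLOB via the .npy format
    buf = io.BytesIO()
    np.save(buf, arr)
    return sqlite3.Binary(buf.getvalue())

def _convert_numpy(blob):
    # deserialize a BLOB produced by _adapt_numpy
    return np.load(io.BytesIO(blob))

sqlite3.register_adapter(np.ndarray, _adapt_numpy)
sqlite3.register_converter('NUMPY', _convert_numpy)

conn = sqlite3.connect(':memory:', detect_types=sqlite3.PARSE_DECLTYPES)
conn.execute('CREATE TABLE temp (temp_id INTEGER PRIMARY KEY, temp_hash NUMPY)')
feats = np.zeros((3000, 128), dtype=np.uint8)
conn.execute('INSERT INTO temp (temp_hash) VALUES (?)', (feats,))
(roundtrip,) = conn.execute('SELECT temp_hash FROM temp').fetchone()
assert np.array_equal(feats, roundtrip)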
def TEST_SQL_NUMPY():
    sqldb_fname = 'temp_test_sql_numpy.sqlite3'
    sqldb_dpath = utool.util_cplat.get_app_resource_dir('ibeis', 'testfiles')
    utool.ensuredir(sqldb_dpath)
    utool.util_path.remove_file(join(sqldb_dpath, sqldb_fname), dryrun=False)
    db = sqldbc.SQLDatabaseController(sqldb_dpath=sqldb_dpath,
                                      sqldb_fname=sqldb_fname)
    db.schema('temp', [
        ('temp_id', 'INTEGER PRIMARY KEY'),
        ('temp_hash', 'NUMPY'),
    ])
    tt = utool.tic()
    feats_list = grab_numpy_testdata(shape=(3e3, 128), dtype=np.uint8)
    print(' * numpy.new time=%r sec' % utool.toc(tt))
    print('[TEST] insert numpy arrays')
    tt = utool.tic()
    feats_iter = ((feats,) for feats in feats_list)
    db.executemany(operation='''
        INSERT INTO temp
        (
            temp_hash
        )
        VALUES (?)
        ''', params_iter=feats_iter)
    print(' * execute insert time=%r sec' % utool.toc(tt))
    print('[TEST] save sql database')
    tt = utool.tic()
    # db.cur.commit()
    db.connection.commit()
    print(' * commit time=%r sec' % utool.toc(tt))
    print('[TEST] read from sql database')
    tt = utool.tic()
    db.cur.execute('SELECT temp_hash FROM temp', [])
    print(' * execute select time=%r sec' % utool.toc(tt))
    tt = utool.tic()
    result_list = _results_gen(db.cur)
    print(' * iter results time=%r sec' % utool.toc(tt))
    print(' * memory(result_list) = %s'
          % utool.byte_str2(utool.get_object_size(result_list)))
    del result_list
    # print('[TEST] result_list=%r' % result_list)
    print('[TEST] dump sql database')
    tt = utool.tic()
    db.dump('temp.dump.txt')
    print(' * dump time=%r sec' % utool.toc(tt))
    # with open('temp.dump.txt') as file_:
    #     print(file_.read())
    return locals()
def _check_for_double_click(iqrw, qtindex):
    threshold = 0.50  # seconds
    # elapsed seconds since the previous click (iqrw.tt is reset on each press)
    distance = utool.toc(iqrw.tt)
    print('Pressed %r' % (distance,))
    col = qtindex.column()
    model = qtindex.model()
    colname = model.get_header_name(col)
    if distance <= threshold:
        if colname == 'status':
            iqrw.view.clicked.emit(qtindex)
            iqrw._on_click(qtindex)
        else:
            # iqrw.view.doubleClicked.emit(qtindex)
            iqrw._on_doubleclick(qtindex)
    iqrw.tt = utool.tic()
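# The tic/toc pair above implements a manual double-click timer. A minimal
# framework-free sketch of the same idea (hypothetical class, stdlib only):
import time

class DoubleClickTimer:
    def __init__(self, threshold=0.50):  # seconds
        self.threshold = threshold
        self._last = None

    def click(self):
        """Return True if this click lands within threshold of the previous one."""
        now = time.perf_counter()
        is_double = (self._last is not None
                     and (now - self._last) <= self.threshold)
        self._last = now  # reset the timer, like iqrw.tt = utool.tic() above
        return is_double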
def build(vocab, verbose=True):
    num_vecs = len(vocab.wx_to_word)
    if vocab.wordflann is None:
        flannclass = pickle_flann.PickleFLANN
        vocab.wordflann = flannclass()
    if verbose:
        logger.info(' ...build kdtree with %d points (may take a sec).'
                    % num_vecs)
        tt = ut.tic(msg='Building vocab index')
    if num_vecs == 0:
        logger.info('WARNING: CANNOT BUILD FLANN INDEX OVER 0 POINTS.')
        logger.info('THIS MAY BE A SIGN OF A DEEPER ISSUE')
    else:
        vocab.wordflann.build_index(vocab.wx_to_word, **vocab.flann_params)
    if verbose:
        ut.toc(tt)
def _akmeans_iterate(data, centroids, datax2_clusterx_old, max_iters,
                     flann_params, ave_unchanged_thresh, ave_unchanged_iterwin):
    """Helper function which continues the iterations of akmeans"""
    num_data = data.shape[0]
    num_clusters = centroids.shape[0]
    # Rolling window of how many assignments changed in each recent iteration
    xx2_unchanged = np.zeros(ave_unchanged_iterwin,
                             dtype=centroids.dtype) + len(data)
    print('[akmeans] Running akmeans: data.shape=%r ; num_clusters=%r'
          % (data.shape, num_clusters))
    print('[akmeans] * max_iters = %r ' % max_iters)
    print('[akmeans] * ave_unchanged_iterwin=%r ; ave_unchanged_thresh=%r'
          % (ave_unchanged_iterwin, ave_unchanged_thresh))
    # print('[akmeans] Printing akmeans info in format: '
    #       'time (iterx, ave(#changed), #unchanged)')
    converged = False
    for xx in range(0, max_iters):
        tt = ut.tic()
        ut.print_('...tic')
        # 1) Find each datapoint's nearest cluster center
        (datax2_clusterx, _dist) = nn.ann_flann_once(centroids, data, 1,
                                                     flann_params)
        elapsed = ut.toc(tt)
        ut.print_('...toc(%.2fs)' % elapsed)
        # 2) Compute new cluster centers
        centroids = _compute_cluster_centers(num_data, num_clusters, data,
                                             centroids, datax2_clusterx)
        # 3) Check for convergence (no change of cluster index)
        num_changed = (datax2_clusterx_old != datax2_clusterx).sum()
        xx2_unchanged[xx % ave_unchanged_iterwin] = num_changed
        ave_unchanged = xx2_unchanged.mean()
        # ut.print_(' (%d, %.2f, %d)\n' % (xx, ave_unchanged, num_changed))
        if ave_unchanged < ave_unchanged_thresh:
            converged = True
            break
        else:
            # Iterate
            datax2_clusterx_old = datax2_clusterx
        # if xx % 5 == 0:
        #     sys.stdout.flush()
    if converged:
        print('[akmeans] * AKMEANS: converged in %d/%d iters'
              % (xx + 1, max_iters))
    else:
        print('[akmeans] * AKMEANS: reached the maximum of %d iters '
              'without converging' % (max_iters,))
    sys.stdout.flush()
    return (datax2_clusterx, centroids)
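# _compute_cluster_centers is not shown above; a plausible vectorized sketch
# (hypothetical, not necessarily the real helper) averages the points assigned
# to each cluster and keeps the old centroid for any empty cluster:
import numpy as np

def compute_cluster_centers_sketch(num_clusters, data, old_centroids,
                                   datax2_clusterx):
    # datax2_clusterx is assumed to be a 1-D array of cluster indices
    dim = data.shape[1]
    sums = np.zeros((num_clusters, dim), dtype=np.float64)
    np.add.at(sums, datax2_clusterx, data)  # per-cluster coordinate sums
    counts = np.bincount(datax2_clusterx, minlength=num_clusters)
    new_centroids = old_centroids.astype(np.float64)
    nonempty = counts > 0
    new_centroids[nonempty] = sums[nonempty] / counts[nonempty, None]
    return new_centroids.astype(old_centroids.dtype)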
def reindex(nnindexer, verbose=True, memtrack=None):
    r"""indexes all vectors with FLANN."""
    num_vecs = nnindexer.num_indexed
    notify_num = 1e6
    verbose_ = ut.VERYVERBOSE or verbose or (not ut.QUIET and num_vecs > notify_num)
    if verbose_:
        print('[nnindex] ...building kdtree over %d points (this may take a sec).'
              % num_vecs)
        tt = ut.tic(msg='Building index')
    idx2_vec = nnindexer.idx2_vec
    flann_params = nnindexer.flann_params
    if num_vecs == 0:
        print('WARNING: CANNOT BUILD FLANN INDEX OVER 0 POINTS. '
              'THIS MAY BE A SIGN OF A DEEPER ISSUE')
    else:
        if memtrack is not None:
            memtrack.report('BEFORE BUILD FLANN INDEX')
        nnindexer.flann.build_index(idx2_vec, **flann_params)
        if memtrack is not None:
            memtrack.report('AFTER BUILD FLANN INDEX')
    if verbose_:
        ut.toc(tt)
def __init__(qres_wgt, ibs, qaid2_qres, parent=None, callback=None, **kwargs):
    print('[qres_wgt] Init QueryResultsWidget')
    # Uncomment below to turn on FilterProxyModel
    if USE_FILTER_PROXY:
        APIItemWidget.__init__(qres_wgt, parent=parent,
                               model_class=CustomFilterModel)
    else:
        APIItemWidget.__init__(qres_wgt, parent=parent)
    qres_wgt.show_new = True
    qres_wgt.show_join = True
    qres_wgt.show_split = True
    qres_wgt.tt = utool.tic()
    # Set results data
    qres_wgt.add_checkboxes(qres_wgt.show_new, qres_wgt.show_join,
                            qres_wgt.show_split)
    qres_wgt.set_query_results(ibs, qaid2_qres, **kwargs)
    qres_wgt.connect_signals_and_slots()
    if callback is None:
        callback = lambda: None
    qres_wgt.callback = callback
    qres_wgt.view.setColumnHidden(0, False)
    qres_wgt.view.setColumnHidden(1, False)
    if parent is None:
        # Register parentless QWidgets
        fig_presenter.register_qt4_win(qres_wgt)
def __init__(
    qres_wgt,
    ibs,
    cm_list,
    parent=None,
    callback=None,
    qreq_=None,
    query_title='',
    review_cfg={},
):
    if ut.VERBOSE:
        logger.info('[qres_wgt] Init QueryResultsWidget')
    assert not isinstance(cm_list, dict)
    assert qreq_ is not None, 'must specify qreq_'
    if USE_FILTER_PROXY:
        super(QueryResultsWidget, qres_wgt).__init__(
            parent=parent, model_class=CustomFilterModel)
    else:
        super(QueryResultsWidget, qres_wgt).__init__(parent=parent)
    qres_wgt.cm_list = cm_list
    qres_wgt.ibs = ibs
    qres_wgt.qreq_ = qreq_
    qres_wgt.query_title = query_title
    qres_wgt.qaid2_cm = dict([(cm.qaid, cm) for cm in cm_list])
    qres_wgt.review_cfg = id_review_api.REVIEW_CFG_DEFAULTS.copy()
    qres_wgt.review_cfg = ut.update_existing(qres_wgt.review_cfg, review_cfg,
                                             assert_exists=True)
    # qres_wgt.altkey_shortcut = QtWidgets.QShortcut(
    #     QtGui.QKeySequence(QtCore.Qt.ALT), qres_wgt,
    #     qres_wgt.on_alt_pressed, context=QtCore.Qt.WidgetShortcut)
    qres_wgt.button_list = None
    qres_wgt.show_new = True
    qres_wgt.show_join = True
    qres_wgt.show_split = True
    qres_wgt.tt = ut.tic()
    # Set results data
    if USE_FILTER_PROXY:
        qres_wgt.add_checkboxes(qres_wgt.show_new, qres_wgt.show_join,
                                qres_wgt.show_split)
    lbl = gt.newLineEdit(
        qres_wgt,
        text=("'T' marks as correct match. 'F' marks as incorrect match. "
              'Alt brings up context menu. '
              'Double click a row to inspect matches.'),
        editable=False,
        enabled=False,
    )
    qres_wgt.layout().setSpacing(0)
    qres_wgt_layout = qres_wgt.layout()
    if hasattr(qres_wgt_layout, 'setMargin'):
        qres_wgt_layout.setMargin(0)
    else:
        qres_wgt_layout.setContentsMargins(0, 0, 0, 0)
    bottom_bar = gt.newWidget(qres_wgt, orientation=Qt.Horizontal,
                              spacing=0, margin=0)
    bottom_bar.layout().setSpacing(0)
    bottom_bar_layout = bottom_bar.layout()
    if hasattr(bottom_bar_layout, 'setMargin'):
        bottom_bar_layout.setMargin(0)
    else:
        bottom_bar_layout.setContentsMargins(0, 0, 0, 0)
    lbl.setMinimumSize(0, 0)
    lbl.setSizePolicy(QtWidgets.QSizePolicy.Expanding,
                      QtWidgets.QSizePolicy.Ignored)
    # lbl.setSizePolicy(gt.newSizePolicy())
    qres_wgt.layout().addWidget(bottom_bar)
    bottom_bar.addWidget(lbl)
    bottom_bar.addNewButton(
        'Mark unreviewed with higher scores as correct',
        pressed=qres_wgt.mark_unreviewed_above_score_as_correct,
    )
    bottom_bar.addNewButton('Repopulate', pressed=qres_wgt.repopulate)
    bottom_bar.addNewButton('Edit Filters', pressed=qres_wgt.edit_filters)
    qres_wgt.setSizePolicy(gt.newSizePolicy())
    qres_wgt.repopulate()
    qres_wgt.connect_signals_and_slots()
    if callback is None:
        callback = partial(ut.identity, None)
    qres_wgt.callback = callback
    qres_wgt.view.setColumnHidden(0, False)
    qres_wgt.view.setColumnHidden(1, False)
    qres_wgt.view.connect_single_key_to_slot(gt.ALT_KEY,
                                             qres_wgt.on_alt_pressed)
    qres_wgt.view.connect_keypress_to_slot(qres_wgt.on_special_key_pressed)
    if parent is None:
        # Register parentless QWidgets
        fig_presenter.register_qt4_win(qres_wgt)
    dbdir = qres_wgt.qreq_.ibs.get_dbdir()
    expt_dir = ut.ensuredir(ut.unixjoin(dbdir, 'SPECIAL_GGR_EXPT_LOGS'))
    review_log_dir = ut.ensuredir(ut.unixjoin(expt_dir, 'review_logs'))
    ts = ut.get_timestamp(isutc=True, timezone=True)
    log_fpath = ut.unixjoin(
        review_log_dir,
        'review_log_%s_%s.json' % (qres_wgt.qreq_.ibs.dbname, ts))
    # LOG ALL CHANGES MADE TO NAMES
    import logging
    # ut.vd(review_log_dir)
    # create a logger dedicated to query review
    logger_ = logging.getLogger('query_review')
    logger_.setLevel(logging.DEBUG)
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    # create a file handler which logs even debug messages
    fh = logging.FileHandler(log_fpath)
    fh.setLevel(logging.DEBUG)
    fh.setFormatter(formatter)
    logger_.addHandler(fh)
    # create a console handler with a higher log level
    ch = logging.StreamHandler()
    ch.setLevel(logging.INFO)
    ch.setFormatter(formatter)
    logger_.addHandler(ch)
    qres_wgt.logger = logger_
    logger_.info('START QUERY_RESULT_REVIEW')
    logger_.info('NUM CHIP_MATCH OBJECTS (len(cm_list)=%d)' % (len(cm_list),))
    logger_.info('NUM PAIRS TO EVIDENCE_DECISION (nRows=%d)'
                 % (qres_wgt.review_api.nRows,))
    logger_.info('PARENT QUERY REQUEST (cfgstr=%s)'
                 % (qres_wgt.qreq_.get_cfgstr(with_input=True),))
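# The review logger above follows the standard two-handler recipe from the
# Python logging HOWTO; factored into a standalone helper it might look like
# this (hypothetical refactor, not part of the widget):
import logging

def make_review_logger(log_fpath, name='query_review'):
    logger_ = logging.getLogger(name)
    logger_.setLevel(logging.DEBUG)
    formatter = logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    fh = logging.FileHandler(log_fpath)  # the file gets everything, DEBUG and up
    fh.setLevel(logging.DEBUG)
    fh.setFormatter(formatter)
    ch = logging.StreamHandler()  # the console only sees INFO and up
    ch.setLevel(logging.INFO)
    ch.setFormatter(formatter)
    logger_.addHandler(fh)
    logger_.addHandler(ch)
    return logger_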
def detect_opencv_keypoints():
    import cv2
    import vtool as vt
    import numpy as np  # NOQA
    # img_fpath = ut.grab_test_imgpath(ut.get_argval('--fname', default='lena.png'))
    img_fpath = ut.grab_test_imgpath(ut.get_argval('--fname', default='zebra.png'))
    imgBGR = vt.imread(img_fpath)
    imgGray = cv2.cvtColor(imgBGR, cv2.COLOR_BGR2GRAY)

    def from_cv2_kpts(cv2_kp):
        kp = (cv2_kp.pt[0], cv2_kp.pt[1], cv2_kp.size, 0, cv2_kp.size,
              cv2_kp.angle)
        return kp

    print('\n'.join(ut.search_module(cv2, 'create', recursive=True)))

    detect_factory = {
        # 'BLOB': cv2.SimpleBlobDetector_create,
        # 'HARRIS': HarrisWrapper.create,
        # 'SIFT': cv2.xfeatures2d.SIFT_create,  # really DoG
        'SURF': cv2.xfeatures2d.SURF_create,  # really harris corners
        'MSER': cv2.MSER_create,
        # 'StarDetector_create',
    }
    extract_factory = {
        'SIFT': cv2.xfeatures2d.SIFT_create,
        'SURF': cv2.xfeatures2d.SURF_create,
        # 'DAISY': cv2.xfeatures2d.DAISY_create,
        'FREAK': cv2.xfeatures2d.FREAK_create,
        # 'LATCH': cv2.xfeatures2d.LATCH_create,
        # 'LUCID': cv2.xfeatures2d.LUCID_create,
        # 'ORB': cv2.ORB_create,
    }
    mask = None
    type_to_kpts = {}
    type_to_desc = {}

    for key in detect_factory.keys():
        factory = detect_factory[key]
        extractor = factory()
        # For MSERs, adapt the region shape and convert it into a keypoint repr
        if hasattr(extractor, 'detectRegions'):
            # bboxes are x, y, w, h
            regions, bboxes = extractor.detectRegions(imgGray)
            # ellipse definition from [Fitzgibbon95]
            # http://www.bmva.org/bmvc/1995/bmvc-95-050.pdf p518
            # ell = [c_x, c_y, R_x, R_y, theta]
            # (cx, cy) = conic center
            # Rx and Ry = conic radii
            # theta is the counterclockwise angle
            fitz_ellipses = [cv2.fitEllipse(mser) for mser in regions]
            # http://answers.opencv.org/question/19015/how-to-use-mser-in-python/
            # hulls = [cv2.convexHull(p.reshape(-1, 1, 2)) for p in regions]
            # hull_ells = [cv2.fitEllipse(hull[:, 0]) for hull in hulls]
            kpts_ = []
            for ell in fitz_ellipses:
                ((cx, cy), (rx, ry), degrees) = ell
                theta = np.radians(degrees)  # fitEllipse reports degrees; convert
                S = vt.scale_mat3x3(rx, ry)
                T = vt.translation_mat3x3(cx, cy)
                R = vt.rotation_mat3x3(theta)
                # R = np.eye(3)
                invVR = T.dot(R.dot(S))
                kpt = vt.flatten_invV_mats_to_kpts(np.array([invVR]))[0]
                kpts_.append(kpt)
            kpts_ = np.array(kpts_)
        tt = ut.tic('Computing %r keypoints' % (key,))
        try:
            cv2_kpts = extractor.detect(imgGray, mask)
        except Exception as ex:
            ut.printex(ex, 'Failed to compute %r keypoints' % (key,),
                       iswarning=True)
        else:
            ut.toc(tt)
            type_to_kpts[key] = cv2_kpts

    print(list(type_to_kpts.keys()))
    print(ut.depth_profile(list(type_to_kpts.values())))
    print('type_to_kpts = ' + ut.repr3(type_to_kpts, truncate=True))

    cv2_kpts = type_to_kpts['MSER']
    kp = cv2_kpts[0]  # NOQA
    # cv2.fitEllipse(cv2_kpts[0])
    cv2_kpts = type_to_kpts['SURF']

    for key in extract_factory.keys():
        factory = extract_factory[key]
        extractor = factory()
        tt = ut.tic('Computing %r descriptors' % (key,))
        try:
            filtered_cv2_kpts, desc = extractor.compute(imgGray, cv2_kpts)
        except Exception as ex:
            ut.printex(ex, 'Failed to compute %r descriptors' % (key,),
                       iswarning=True)
        else:
            ut.toc(tt)
            type_to_desc[key] = desc

    print(list(type_to_desc.keys()))
    print(ut.depth_profile(list(type_to_desc.values())))
    print('type_to_desc = ' + ut.repr3(type_to_desc, truncate=True))
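# The vtool helpers used above (scale_mat3x3, translation_mat3x3,
# rotation_mat3x3) are not shown; the T.dot(R.dot(S)) composition they build is
# just a 3x3 affine. A plain-numpy sketch of the same construction, under the
# assumption that rx, ry are the ellipse radii the code extracts:
import numpy as np

def ellipse_to_affine_sketch(cx, cy, rx, ry, degrees):
    theta = np.radians(degrees)  # cv2.fitEllipse reports the angle in degrees
    S = np.diag([rx, ry, 1.0])                       # scale
    R = np.array([[np.cos(theta), -np.sin(theta), 0.0],
                  [np.sin(theta),  np.cos(theta), 0.0],
                  [0.0,            0.0,           1.0]])  # rotation
    T = np.eye(3)
    T[0, 2], T[1, 2] = cx, cy                        # translation
    return T @ R @ S  # maps the unit circle onto the fitted ellipse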
def create_databse():
    def _randstr(size=6, chars=string.ascii_uppercase + string.digits):
        return ''.join(random.choice(chars) for _ in range(size))

    sqldb_fname = 'data_test_qt.sqlite3'
    sqldb_dpath = utool.util_cplat.get_app_resource_dir('ibeis', 'testfiles')
    utool.ensuredir(sqldb_dpath)
    utool.util_path.remove_file(join(sqldb_dpath, sqldb_fname), dryrun=False)
    db = SQLDatabaseControl.SQLDatabaseController(sqldb_dpath=sqldb_dpath,
                                                  sqldb_fname=sqldb_fname)
    encounters = [
        ('encounter_id', 'INTEGER PRIMARY KEY'),
        ('encounter_name', 'TEXT'),
    ]
    db.add_table('encounters', encounters)
    rows = 1 * (10 ** 3)
    feats_iter = ((_randstr(),) for i in range(rows))
    print('[TEST] insert encounters')
    tt = utool.tic()
    db.executemany(operation='''
        INSERT INTO encounters
        (
            encounter_name
        )
        VALUES (?)
        ''', params_iter=feats_iter)
    print(' * execute insert time=%r sec' % utool.toc(tt))
    ##############################################
    headers = [
        ('data_id', 'INTEGER PRIMARY KEY'),
        ('encounter_id', 'INT'),
        ('data_float', 'FLOAT'),
        ('data_int', 'INT'),
        ('data_text', 'TEXT'),
        ('data_text2', 'TEXT'),
    ]
    db.add_table('data', headers)
    col_name_list = [column[0] for column in headers]
    col_type_list = [str] * len(col_name_list)
    col_edit_list = [False, True, True, True, True, True]
    col_nice_list = [
        'ID',
        'Encounter ID',
        'TEST Float',
        'TEST Int',
        'TEST String 1',
        'TEST String 2',
    ]
    rows = 1 * (10 ** 4)
    feats_iter = ((random.randint(0, 1000), random.uniform(0.0, 1.0),
                   random.randint(0, 100), _randstr(), _randstr())
                  for i in range(rows))
    print('[TEST] insert data')
    tt = utool.tic()
    db.executemany(operation='''
        INSERT INTO data
        (
            encounter_id,
            data_float,
            data_int,
            data_text,
            data_text2
        )
        VALUES (?,?,?,?,?)
        ''', params_iter=feats_iter)
    print(' * execute insert time=%r sec' % utool.toc(tt))
    return col_name_list, col_type_list, col_edit_list, col_nice_list, db
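# The executemany-with-a-generator pattern above also works directly with the
# stdlib sqlite3 module; a minimal standalone sketch (in-memory database,
# same hypothetical table layout):
import random
import sqlite3
import string

def _randstr_sketch(size=6, chars=string.ascii_uppercase + string.digits):
    return ''.join(random.choice(chars) for _ in range(size))

conn = sqlite3.connect(':memory:')
conn.execute('CREATE TABLE encounters '
             '(encounter_id INTEGER PRIMARY KEY, encounter_name TEXT)')
rows = 10 ** 3
# a generator avoids materializing all parameter tuples at once
params_iter = ((_randstr_sketch(),) for _ in range(rows))
conn.executemany('INSERT INTO encounters (encounter_name) VALUES (?)',
                 params_iter)
conn.commit()
print(conn.execute('SELECT COUNT(*) FROM encounters').fetchone()[0])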
def create_databse():
    def _randstr(size=6, chars=string.ascii_uppercase + string.digits):
        return ''.join(random.choice(chars) for _ in range(size))

    sqldb_fname = 'data_test_qt.sqlite3'
    sqldb_dpath = utool.util_cplat.get_app_resource_dir('ibeis', 'testfiles')
    utool.ensuredir(sqldb_dpath)
    utool.util_path.remove_file(join(sqldb_dpath, sqldb_fname), dryrun=False)
    db = SQLDatabaseControl.SQLDatabaseController(sqldb_dpath=sqldb_dpath,
                                                  sqldb_fname=sqldb_fname)
    imagesets = [
        ('imageset_id', 'INTEGER PRIMARY KEY'),
        ('imageset_name', 'TEXT'),
    ]
    db.add_table('imagesets', imagesets)
    rows = 1 * (10 ** 3)
    feats_iter = ((_randstr(),) for i in range(rows))
    print('[TEST] insert imagesets')
    tt = utool.tic()
    db.executemany(operation='''
        INSERT INTO imagesets
        (
            imageset_name
        )
        VALUES (?)
        ''', params_iter=feats_iter)
    print(' * execute insert time=%r sec' % utool.toc(tt))
    ##############################################
    headers = [
        ('data_id', 'INTEGER PRIMARY KEY'),
        ('imageset_id', 'INT'),
        ('data_float', 'FLOAT'),
        ('data_int', 'INT'),
        ('data_text', 'TEXT'),
        ('data_text2', 'TEXT'),
    ]
    db.add_table('data', headers)
    col_name_list = [column[0] for column in headers]
    col_type_list = [str] * len(col_name_list)
    col_edit_list = [False, True, True, True, True, True]
    col_nice_list = [
        'ID',
        'ImageSet ID',
        'TEST Float',
        'TEST Int',
        'TEST String 1',
        'TEST String 2',
    ]
    rows = 1 * (10 ** 4)
    feats_iter = ((random.randint(0, 1000), random.uniform(0.0, 1.0),
                   random.randint(0, 100), _randstr(), _randstr())
                  for i in range(rows))
    print('[TEST] insert data')
    tt = utool.tic()
    db.executemany(operation='''
        INSERT INTO data
        (
            imageset_id,
            data_float,
            data_int,
            data_text,
            data_text2
        )
        VALUES (?,?,?,?,?)
        ''', params_iter=feats_iter)
    print(' * execute insert time=%r sec' % utool.toc(tt))
    return col_name_list, col_type_list, col_edit_list, col_nice_list, db