def query_chips_graph_microsoft(ibs, *args, **kwargs):
    """
    Run a graph identification query and reshape the single resulting
    ChipMatch into the Microsoft-API response format.

    Returns:
        dict: with two best-score-first sorted lists:
            'annotations': per-database-annotation scores, plus an optional
                           external visualize URL postfix
            'names':       per-name scores (the unknown name is skipped)
    """
    # 'endpoint' is consumed here (only used to build visualize URLs);
    # everything else is forwarded to query_chips_graph unchanged.
    endpoint = kwargs.pop('endpoint')
    result = ibs.query_chips_graph(*args, **kwargs)
    cm_dict = result['cm_dict']
    cm_key_list = list(cm_dict.keys())
    # This wrapper only supports a single query annotation per call
    assert len(cm_key_list) == 1
    cm_key = cm_key_list[0]
    cm = cm_dict[cm_key]
    qaid = ibs.get_annot_aids_from_uuid(cm['qannot_uuid'])
    reference = cm['dannot_extern_reference']
    response = {
        'annotations': [],
        'names': [],
    }
    # Sort database annotations by score, best first
    zipped = list(
        zip(cm['annot_score_list'], cm['dannot_uuid_list'], cm['dannot_extern_list']))
    zipped = sorted(zipped, reverse=True)
    for dannot_score, dannot_uuid, dannot_extern in zipped:
        daid = ibs.get_annot_aids_from_uuid(dannot_uuid)
        annotation = _annotation(ibs, daid)
        extern_url_postfix = None
        if dannot_extern:
            # Spaces are stripped so the JSON can be embedded in a URL query string
            query_annotation_ = ut.to_json(_annotation(ibs, qaid)).replace(' ', '')
            database_annotation_ = ut.to_json(_annotation(ibs, daid)).replace(
                ' ', '')
            args = (
                endpoint,
                reference,
                query_annotation_,
                database_annotation_,
            )
            extern_url_postfix = (
                '%s?reference=%s&query_annotation=%s&database_annotation=%s' % args)
        response['annotations'].append({
            'score': dannot_score,
            'annotation': annotation,
            'visualize': extern_url_postfix,
        })
    # Sort candidate names by score, best first, dropping the unknown name
    zipped = list(zip(cm['name_score_list'], cm['unique_name_list']))
    zipped = sorted(zipped, reverse=True)
    for name_score, name_text in zipped:
        if name_text == const.UNKNOWN:
            continue
        nid = ibs.get_name_rowids_from_text(name_text)
        response['names'].append({
            'score': name_score,
            'name': _name(ibs, nid)
        })
    return response
def log_detections(ibs, aid_list, fallback=True):
    """
    Append a record of newly added detections (one per annotation in
    *aid_list*) to the local ``detections.json`` log file.

    If reading or writing the log fails and *fallback* is True, the
    (possibly corrupt) log file is deleted and the write is retried
    exactly once with fallback disabled.
    """
    import time
    import os
    json_log_path = ibs.get_logdir_local()
    json_log_filename = 'detections.json'
    json_log_filepath = os.path.join(json_log_path, json_log_filename)
    logger.info('Logging detections added to: %r' % (json_log_filepath,))
    try:
        # Log has never been made, create one
        if not os.path.exists(json_log_filepath):
            json_dict = {
                'updates': [],
            }
            json_str = ut.to_json(json_dict, pretty=True)
            with open(json_log_filepath, 'w') as json_log_file:
                json_log_file.write(json_str)
        # Get current log state
        with open(json_log_filepath, 'r') as json_log_file:
            json_str = json_log_file.read()
        json_dict = ut.from_json(json_str)
        # Get values
        db_name = ibs.get_db_name()
        db_init_uuid = ibs.get_db_init_uuid()
        # Zip all the updates together and write to updates list in dictionary
        gid_list = ibs.get_annot_gids(aid_list)
        bbox_list = ibs.get_annot_bboxes(aid_list)
        theta_list = ibs.get_annot_thetas(aid_list)
        zipped = list(zip(aid_list, gid_list, bbox_list, theta_list))
        for aid, gid, bbox, theta in zipped:
            json_dict['updates'].append(
                {
                    'time_unixtime': time.time(),
                    'db_name': db_name,
                    'db_init_uuid': db_init_uuid,
                    'image_rowid': gid,
                    'annot_rowid': aid,
                    'annot_bbox': bbox,
                    'annot_theta': theta,
                }
            )
        # Write new log state
        json_str = ut.to_json(json_dict, pretty=True)
        with open(json_log_filepath, 'w') as json_log_file:
            json_log_file.write(json_str)
    except Exception:
        # Broad catch is deliberate: logging must never take down the caller
        if fallback:
            logger.info('WRITE DETECTION.JSON FAILED - ATTEMPTING FALLBACK')
            # Delete the (possibly corrupt) log and retry once without fallback
            ut.delete(json_log_filepath)
            ibs.log_detections(aid_list, fallback=False)
        else:
            logger.info('WRITE DETECTION.JSON FAILED - FALLBACK FAILED')
def add_test(
    ibs,
    test_challenge_list,
    test_response_list,
    test_result_list=None,
    test_uuid_list=None,
    test_user_identity_list=None,
):
    """
    Add rows to the staging TEST table, one per challenge/response pair.

    Challenges and responses are JSON-serialized before storage.  Optional
    columns default to fresh UUID4s (uuid list) and None (result list and
    user identity list).

    Returns:
        list: test rowids (deduplicated by UUID via add_cleanly)
    """
    assert len(test_challenge_list) == len(test_response_list)
    n_input = len(test_challenge_list)
    # Store challenges and responses as JSON text
    test_challenge_list = [
        ut.to_json(test_challenge) for test_challenge in test_challenge_list
    ]
    test_response_list = [
        ut.to_json(test_response) for test_response in test_response_list
    ]
    if test_uuid_list is None:
        test_uuid_list = [uuid.uuid4() for _ in range(n_input)]
    if test_result_list is None:
        test_result_list = [None] * n_input
    if test_user_identity_list is None:
        test_user_identity_list = [None] * n_input
    # Column 0 (the UUID) is the superkey used for deduplication
    superkey_paramx = (0, )
    colnames = [
        TEST_UUID,
        TEST_CHALLENGE,
        TEST_RESPONSE,
        TEST_RESULT,
        TEST_USER_IDENTITY,
    ]
    params_iter = list(
        zip(
            test_uuid_list,
            test_challenge_list,
            test_response_list,
            test_result_list,
            test_user_identity_list,
        ))
    test_rowid_list = ibs.staging.add_cleanly(
        const.TEST_TABLE,
        colnames,
        params_iter,
        ibs.get_test_rowids_from_uuid,
        superkey_paramx,
    )
    return test_rowid_list
def on_engine_request(ibs, jobid, action, args, kwargs): """ Run whenever the engine recieves a message """ # Start working if VERBOSE_JOBS: print('starting job=%r' % (jobid,)) # Map actions to IBEISController calls here if action == 'helloworld': def helloworld(time_=0, *args, **kwargs): time.sleep(time_) retval = ('HELLO time_=%r ' % (time_,)) + ut.repr2((args, kwargs)) return retval action_func = helloworld else: # check for ibs func action_func = getattr(ibs, action) if VERBOSE_JOBS: print('resolving action=%r to ibeis function=%r' % (action, action_func)) try: result = action_func(*args, **kwargs) exec_status = 'ok' except Exception as ex: result = ut.formatex(ex, keys=['jobid'], tb=True) result = ut.strip_ansi(result) exec_status = 'exception' json_result = ut.to_json(result) engine_result = dict( exec_status=exec_status, json_result=json_result, jobid=jobid, ) return engine_result
def send_multipart_json(sock, idents, reply):
    """Serialize *reply* as UTF-8 JSON and send it on *sock*, keeping the
    leading routing-identity frames intact."""
    payload = ut.to_json(reply).encode('utf-8')
    frames = idents + [payload]
    sock.send_multipart(frames)
def translate_ibeis_webreturn(rawreturn, success=True, code=None, message=None,
                              jQuery_callback=None, cache=None):
    """
    Wrap a raw API return value in the standard IBEIS JSON response envelope.

    Args:
        rawreturn: payload produced by the API handler (stored under 'response')
        success (bool): whether the call succeeded
        code: status code for the envelope (defaults to '')
        message: human-readable status message (defaults to '')
        jQuery_callback (str): if given, wrap the JSON in a JSONP callback call
        cache: cache hint for the client (defaults to -1)

    Returns:
        str: JSON text (or JSONP text when a jQuery callback was requested)
    """
    if code is None:
        code = ''
    if message is None:
        message = ''
    if cache is None:
        cache = -1
    template = {
        'status': {
            'success': success,
            'code': code,
            'message': message,
            'cache': cache,
            #'debug': {}  # TODO
        },
        'response': rawreturn
    }
    try:
        response = ut.to_json(template)
    except Exception:
        # FIX: was a bare `except:`, which also swallowed KeyboardInterrupt /
        # SystemExit and then fell through with `response` unbound (NameError).
        # Narrow the catch and re-raise after the interactive debug hook.
        ut.embed()
        raise
    if jQuery_callback is not None and isinstance(jQuery_callback, six.string_types):
        print('[web] Including jQuery callback function: %r' % (jQuery_callback, ))
        response = '%s(%s)' % (jQuery_callback, response)
    return response
def tryout_web_graphs(self, infr): """ https://plot.ly/python/ http://bokeh.pydata.org/en/latest/ pip install bokeh Notes: http://www.coppelia.io/2014/07/an-a-to-z-of-extra-features-for-the-d3-force-layout/ http://andrewmellor.co.uk/blog/articles/2014/12/14/d3-networks/ pip install plotly # eww need to sign up and get a key http://igraph.org/ import mpld3 mpld3.save_html(fig, open('fig.html', 'w')) mpld3.save_json(fig, open('fig.json', 'w')) fig = pt.gcf() """ #import plottool as pt # http://andrewmellor.co.uk/blog/articles/2014/12/14/d3-networks/ from networkx.readwrite import json_graph G = infr.graph data = json_graph.node_link_data(G) json_text = ut.to_json(data, pretty=True) ut.writeto('graph.json', json_text) ut.editfile('graph.json') ut.startfile('d3_example.html')
def on_engine_request(ibs, jobid, action, args, kwargs): """ Run whenever the engine recieves a message """ # Start working if VERBOSE_JOBS: print('starting job=%r' % (jobid, )) # Map actions to IBEISController calls here if action == 'helloworld': def helloworld(time_=0, *args, **kwargs): time.sleep(time_) retval = ('HELLO time_=%r ' % (time_, )) + ut.repr2((args, kwargs)) return retval action_func = helloworld else: # check for ibs func action_func = getattr(ibs, action) if VERBOSE_JOBS: print('resolving action=%r to ibeis function=%r' % (action, action_func)) try: result = action_func(*args, **kwargs) exec_status = 'ok' except Exception as ex: result = ut.formatex(ex, keys=['jobid'], tb=True) result = ut.strip_ansi(result) exec_status = 'exception' json_result = ut.to_json(result) engine_result = dict( exec_status=exec_status, json_result=json_result, jobid=jobid, ) return engine_result
def _detect_remote_push_parts(ibs, part_rowid_list):
    """POST part data (owning annot uuids, part uuids, bboxes) to the remote
    ``/api/part/json/`` endpoint and verify the server's response."""
    route_url = _construct_route_url('/api/part/json/')
    logger.info('\tSending...')
    payload = {
        'annot_uuid_list': ibs.get_part_annot_uuids(part_rowid_list),
        'part_uuid_list': ibs.get_part_uuids(part_rowid_list),
        'part_bbox_list': ibs.get_part_bboxes(part_rowid_list),
    }
    # Every value is shipped as JSON text
    payload = {key: ut.to_json(value) for key, value in payload.items()}
    response = requests.post(route_url, data=payload)
    _verify_response(response)
    logger.info('\t...sent')
def _detect_remote_push_metadata(ibs, route_rule, uuid_str, value_str, uuid_list, value_list):
    """PUT a batch of metadata values, keyed by UUID, to the remote endpoint
    named by *route_rule* and verify the server's response."""
    route_url = _construct_route_url(route_rule)
    print('\tSetting %s metadata for %s' % (route_rule, uuid_str, ))
    payload = {
        uuid_str: uuid_list,
        value_str: value_list,
    }
    # Every value is shipped as JSON text
    payload = {key: ut.to_json(value) for key, value in payload.items()}
    response = requests.put(route_url, data=payload)
    _verify_response(response)
    print('\t...set')
def _detect_remote_push_annots(ibs, aid_list):
    """POST annotation data (image uuids, annot uuids, bboxes) to the remote
    ``/api/annot/json/`` endpoint and verify the server's response."""
    route_url = _construct_route_url('/api/annot/json/')
    print('\tSending...')
    payload = {
        'image_uuid_list': ibs.get_annot_image_uuids(aid_list),
        'annot_uuid_list': ibs.get_annot_uuids(aid_list),
        'annot_bbox_list': ibs.get_annot_bboxes(aid_list),
    }
    # Every value is shipped as JSON text
    payload = {key: ut.to_json(value) for key, value in payload.items()}
    response = requests.post(route_url, data=payload)
    _verify_response(response)
    print('\t...sent')
def _sync_get_auuid_endpoint(ibs, endpoint, auuid_list):
    """
    Query *endpoint* on the remote sync server for information about the
    given annotation UUIDs.

    Accepts UUIDs either as `uuid.UUID` objects or as their string form;
    strings are normalized to `UUID` before serialization.  The caller's
    list is never mutated.
    """
    # make sure auuid_list is correct format (work on a copy)
    working_list = auuid_list.copy()
    # FIX: use isinstance() instead of `type(...) is str` so str subclasses
    # are normalized too, and use list truthiness for the empty check.
    if working_list and isinstance(working_list[0], str):
        from uuid import UUID
        working_list = [UUID(item) for item in working_list]
    serialized_uuids = ut.to_json(working_list)
    route_url = _construct_route_url(endpoint)
    logger.info('\tGetting info on %d auuids from %s' % (len(auuid_list), route_url))
    data_dict = {'uuid_list': serialized_uuids}
    return _get(endpoint, data=data_dict)
def _detect_remote_push_imageset(ibs, image_uuid_list):
    """Assign the given images on the remote server to a fresh
    'Sync from ...' imageset labeled with this database's identity."""
    route_url = _construct_route_url('/api/image/imageset/text/json/')
    # Label the sync batch with the source database name/uuid and a timestamp
    db_name = ibs.get_dbname()
    db_uuid = ibs.get_db_init_uuid()
    time_str = ut.get_timestamp()
    imageset_text = 'Sync from %s (%s) at %s' % (db_name, db_uuid, time_str)
    imageset_text_list = [imageset_text] * len(image_uuid_list)
    payload = {
        'image_uuid_list': image_uuid_list,
        'imageset_text_list': imageset_text_list,
    }
    # Every value is shipped as JSON text
    payload = {key: ut.to_json(value) for key, value in payload.items()}
    response = requests.put(route_url, data=payload)
    _verify_response(response)
def set_part_staged_metadata(ibs, part_rowid_list, metadata_dict_list):
    r"""
    Sets the part's staged metadata using a metadata dictionary

    RESTful:
        Method: PUT
        URL: /api/part/staged/metadata/

    CommandLine:
        python -m wbia.control.manual_part_funcs --test-set_part_staged_metadata

    Example:
        >>> # ENABLE_DOCTEST
        >>> from wbia.control.manual_part_funcs import *  # NOQA
        >>> import wbia
        >>> import random
        >>> # build test data
        >>> ibs = wbia.opendb('testdb1')
        >>> aid_list = ibs.get_valid_aids()[0:1]
        >>> bbox_list = [[0, 0, 100, 100]] * len(aid_list)
        >>> part_rowid_list = ibs.add_parts(aid_list, bbox_list=bbox_list)
        >>> metadata_dict_list = [
        >>>     {'test': random.uniform(0.0, 1.0)},
        >>> ] * len(part_rowid_list)
        >>> print(ut.repr2(metadata_dict_list))
        >>> ibs.set_part_staged_metadata(part_rowid_list, metadata_dict_list)
        >>> # verify results
        >>> metadata_dict_list_ = ibs.get_part_staged_metadata(part_rowid_list)
        >>> print(ut.repr2(metadata_dict_list_))
        >>> assert metadata_dict_list == metadata_dict_list_
        >>> metadata_str_list = [ut.to_json(metadata_dict) for metadata_dict in metadata_dict_list]
        >>> print(ut.repr2(metadata_str_list))
        >>> metadata_str_list_ = ibs.get_part_staged_metadata(part_rowid_list, return_raw=True)
        >>> print(ut.repr2(metadata_str_list_))
        >>> assert metadata_str_list == metadata_str_list_
        >>> ibs.delete_parts(part_rowid_list)
    """
    id_iter = ((part_rowid, ) for part_rowid in part_rowid_list)
    # IMPROVED: replaced manual append loop with a comprehension (same behavior)
    # Each metadata dict is stored as its JSON text form
    metadata_str_list = [
        ut.to_json(metadata_dict) for metadata_dict in metadata_dict_list
    ]
    val_list = ((metadata_str, ) for metadata_str in metadata_str_list)
    ibs.db.set(const.PART_TABLE, ('part_staged_metadata_json', ), val_list, id_iter)
def set_part_contour(ibs, part_rowid_list, contour_dict_list):
    r"""
    Sets the part's contour using a contour dictionary

    RESTful:
        Method: PUT
        URL: /api/part/contour/

    CommandLine:
        python -m ibeis.control.manual_part_funcs --test-set_part_contour

    Example:
        >>> # ENABLE_DOCTEST
        >>> from ibeis.control.manual_part_funcs import *  # NOQA
        >>> import ibeis
        >>> import random
        >>> # build test data
        >>> ibs = ibeis.opendb('testdb1')
        >>> aid_list = ibs.get_valid_aids()[0:1]
        >>> bbox_list = [[0, 0, 100, 100]] * len(aid_list)
        >>> part_rowid_list = ibs.add_parts(aid_list, bbox_list=bbox_list)
        >>> contour_dict_list = [
        >>>     {'test': random.uniform(0.0, 1.0)},
        >>> ]
        >>> print(ut.repr2(contour_dict_list))
        >>> ibs.set_part_contour(part_rowid_list, contour_dict_list)
        >>> # verify results
        >>> contour_dict_list_ = ibs.get_part_contour(part_rowid_list)
        >>> print(ut.repr2(contour_dict_list_))
        >>> assert contour_dict_list == contour_dict_list_
        >>> contour_str_list = [ut.to_json(contour_dict) for contour_dict in contour_dict_list]
        >>> print(ut.repr2(contour_str_list))
        >>> contour_str_list_ = ibs.get_part_contour(part_rowid_list, return_raw=True)
        >>> print(ut.repr2(contour_str_list_))
        >>> assert contour_str_list == contour_str_list_
        >>> ibs.delete_parts(part_rowid_list)
    """
    id_iter = ((part_rowid, ) for part_rowid in part_rowid_list)
    # IMPROVED: replaced manual append loop with a comprehension (same behavior)
    # Each contour dict is stored as its JSON text form
    contour_str_list = [
        ut.to_json(contour_dict) for contour_dict in contour_dict_list
    ]
    val_list = ((contour_str, ) for contour_str in contour_str_list)
    ibs.db.set(const.PART_TABLE, ('part_contour_json', ), val_list, id_iter)
def set_review_metadata(ibs, review_rowid_list, metadata_dict_list):
    r"""
    Sets the review's metadata using a metadata dictionary

    RESTful:
        Method: PUT
        URL: /api/review/metadata/

    CommandLine:
        python -m wbia.control.manual_review_funcs --test-set_review_metadata

    Example:
        >>> # ENABLE_DOCTEST
        >>> from wbia.control.manual_review_funcs import *  # NOQA
        >>> import wbia
        >>> import random
        >>> # build test data
        >>> ibs = wbia.opendb('testdb1')
        >>> review_rowid_list = ibs.add_review([1], [2], [0])
        >>> metadata_dict_list = [
        >>>     {'test': random.uniform(0.0, 1.0)},
        >>> ]
        >>> print(ut.repr2(metadata_dict_list))
        >>> ibs.set_review_metadata(review_rowid_list, metadata_dict_list)
        >>> # verify results
        >>> metadata_dict_list_ = ibs.get_review_metadata(review_rowid_list)
        >>> print(ut.repr2(metadata_dict_list_))
        >>> assert metadata_dict_list == metadata_dict_list_
        >>> metadata_str_list = [ut.to_json(metadata_dict) for metadata_dict in metadata_dict_list]
        >>> print(ut.repr2(metadata_str_list))
        >>> metadata_str_list_ = ibs.get_review_metadata(review_rowid_list, return_raw=True)
        >>> print(ut.repr2(metadata_str_list_))
        >>> assert metadata_str_list == metadata_str_list_
        >>> ibs.delete_review(review_rowid_list)
    """
    id_iter = ((review_rowid, ) for review_rowid in review_rowid_list)
    # IMPROVED: replaced manual append loop with a comprehension (same behavior)
    # Each metadata dict is stored as its JSON text form
    metadata_str_list = [
        ut.to_json(metadata_dict) for metadata_dict in metadata_dict_list
    ]
    val_list = ((metadata_str, ) for metadata_str in metadata_str_list)
    ibs.staging.set(const.REVIEW_TABLE, ('review_metadata_json', ), val_list, id_iter)
def get_config_rowid(table, config=None):
    """
    Resolve (or create) the rowid for *config* in this table's config table.

    The config is reduced to a hash string: an explicit
    ``<tablename>_hashid`` entry if present, otherwise a hash of the JSON
    form of the ``<tablename>_config`` sub-config.  With no config at all,
    the hashid 'none' is used.
    """
    #config_hashid = config.get('feat_cfgstr')
    #assert config_hashid is not None
    # TODO store config_rowid in qparams
    #else:
    #    config_hashid = db.cfg.feat_cfg.get_cfgstr()
    if config is not None:
        try:
            #config_hashid = 'none'
            config_hashid = config.get(table.tablename + '_hashid')
        except KeyError:
            # NOTE(review): a plain dict's .get() never raises KeyError (it
            # returns None), so this fallback only fires for config objects
            # whose .get() raises — confirm what types `config` can be here.
            try:
                subconfig = config.get(table.tablename + '_config')
                config_hashid = ut.hashstr27(ut.to_json(subconfig))
            except KeyError:
                print('Warning: Config must either contain a string <tablename>_hashid or a dict <tablename>_config')
                raise
    else:
        config_hashid = 'none'
    config_rowid = table.add_config(config_hashid)
    return config_rowid
def translate_wbia_webreturn(
    rawreturn,
    success=True,
    code=None,
    message=None,
    jQuery_callback=None,
    cache=None,
    __skip_microsoft_validation__=False,
):
    """
    Wrap a raw API return value in the standard wbia JSON response envelope.

    When Microsoft API mode is enabled (and validation is not skipped), the
    raw return must already be a dict and is serialized as-is, without the
    status envelope.  Otherwise the payload is wrapped under 'response'
    next to a 'status' block.
    """
    if MICROSOFT_API_ENABLED and not __skip_microsoft_validation__:
        if rawreturn is not None:
            assert isinstance(
                rawreturn, dict
            ), 'Microsoft APIs must return a Python dictionary'
            template = rawreturn
        # NOTE(review): if rawreturn is None on this branch, `template` is
        # never assigned and ut.to_json below raises NameError — confirm
        # whether that path can actually occur.
    else:
        if code is None:
            code = ''
        if message is None:
            message = ''
        if cache is None:
            cache = -1
        template = {
            'status': {
                'success': success,
                'code': code,
                'message': message,
                'cache': cache,
                # 'debug': {}  # TODO
            },
            'response': rawreturn,
        }
    response = ut.to_json(template)
    if jQuery_callback is not None and isinstance(jQuery_callback, six.string_types):
        logger.info('[web] Including jQuery callback function: %r' % (jQuery_callback,))
        # JSONP: wrap the JSON payload in the requested callback invocation
        response = '%s(%s)' % (jQuery_callback, response)
    return response
def translate_ibeis_webreturn(rawreturn, success=True, code=None, message=None,
                              jQuery_callback=None, cache=None):
    """Wrap a raw API return value in the standard IBEIS JSON response
    envelope, optionally as JSONP when a jQuery callback is requested."""
    code = '' if code is None else code
    message = '' if message is None else message
    cache = -1 if cache is None else cache
    template = {
        'status': {
            'success': success,
            'code': code,
            'message': message,
            'cache': cache,
        },
        'response': rawreturn,
    }
    response = ut.to_json(template)
    if jQuery_callback is not None and isinstance(jQuery_callback, six.string_types):
        print('[web] Including jQuery callback function: %r' % (jQuery_callback, ))
        response = '%s(%s)' % (jQuery_callback, response)
    return response
def engine_loop(id_, dbdir=None):
    r"""
    IBEIS:
        This will be part of a worker process with its own IBEISController
        instance.

        Needs to send where the results will go and then publish the results
        there.

    The engine_loop - receives messages, performs some action, and sends a
    reply, preserving the leading two message parts as routing identities
    """
    import ibeis
    #base_print = print  # NOQA
    # Shadows the builtin on purpose so all engine output is colored
    print = partial(ut.colorprint, color='darkred')
    with ut.Indenter('[engine %d] ' % (id_)):
        if VERBOSE_JOBS:
            print('Initializing engine')
            print('connect engine_url2 = %r' % (engine_url2,))
        assert dbdir is not None
        #ibs = ibeis.opendb(dbname)
        ibs = ibeis.opendb(dbdir=dbdir, use_cache=False, web=False)
        # ROUTER socket: receives job requests (with routing identities)
        engine_rout_sock = ctx.socket(zmq.ROUTER)
        engine_rout_sock.connect(engine_url2)
        # DEALER socket: pushes finished results to the collector
        collect_deal_sock = ctx.socket(zmq.DEALER)
        collect_deal_sock.setsockopt_string(zmq.IDENTITY, 'engine.collect.DEALER')
        collect_deal_sock.connect(collect_url1)
        if VERBOSE_JOBS:
            print('connect collect_url1 = %r' % (collect_url1,))
            print('engine is initialized')
        while True:
            idents, engine_request = rcv_multipart_json(engine_rout_sock, print=print)
            action = engine_request['action']
            jobid = engine_request['jobid']
            args = engine_request['args']
            kwargs = engine_request['kwargs']
            callback_url = engine_request['callback_url']
            # Start working
            if VERBOSE_JOBS:
                print('starting job=%r' % (jobid,))
            # Map actions to IBEISController calls here
            if action == 'helloworld':
                # Built-in test action: sleep, then echo the arguments back
                def helloworld(time_=0, *args, **kwargs):
                    time.sleep(time_)
                    retval = ('HELLO time_=%r ' % (time_,)) + ut.repr2((args, kwargs))
                    return retval
                action_func = helloworld
            else:
                # check for ibs func
                action_func = getattr(ibs, action)
                if VERBOSE_JOBS:
                    print('resolving to ibeis function')
            try:
                result = action_func(*args, **kwargs)
                exec_status = 'ok'
            except Exception as ex:
                # Ship the formatted traceback as the result; the engine
                # itself must keep running
                result = ut.formatex(ex, keys=['jobid'], tb=True)
                exec_status = 'exception'
            json_result = ut.to_json(result)
            engine_result = dict(
                exec_status=exec_status,
                json_result=json_result,
                jobid=jobid,
            )
            # Store results in the collector
            collect_request = dict(
                idents=idents,
                action='store',
                jobid=jobid,
                engine_result=engine_result,
                callback_url=callback_url,
            )
            if VERBOSE_JOBS:
                print('...done working. pushing result to collector')
            collect_deal_sock.send_json(collect_request)
        # ----
        # NOTE(review): unreachable — the `while True` loop above has no break
        if VERBOSE_JOBS:
            print('Exiting engine loop')
def ut_to_json_encode(dict_):
    """JSON-encode every value of *dict_* in place (handles UUIDs and other
    non-primitive values) and return the same, mutated dict."""
    for key in list(dict_):
        dict_[key] = ut.to_json(dict_[key])
    return dict_
def learn_featscore_normalizer(qreq_, datakw={}, learnkw={}):
    r"""
    Takes the result of queries and trains a score encoder

    Args:
        qreq_ (ibeis.QueryRequest): query request object with hyper-parameters

    Returns:
        vtool.ScoreNormalizer: encoder

    CommandLine:
        python -m ibeis --tf learn_featscore_normalizer --show -t default:
        python -m ibeis --tf learn_featscore_normalizer --show --fsvx=0 --threshx=1 --show
        python -m ibeis --tf learn_featscore_normalizer --show -a default:size=40 -t default:fg_on=False,lnbnn_on=False,ratio_thresh=1.0,K=1,Knorm=6,sv_on=False,normalizer_rule=name --fsvx=0 --threshx=1 --show

        python -m ibeis --tf learn_featscore_normalizer --show --disttype=ratio
        python -m ibeis --tf learn_featscore_normalizer --show --disttype=lnbnn
        python -m ibeis --tf learn_featscore_normalizer --show --disttype=L2_sift -t default:K=1

        python -m ibeis --tf learn_featscore_normalizer --show --disttype=L2_sift -a timectrl -t default:K=1 --db PZ_Master1
        python -m ibeis --tf learn_featscore_normalizer --show --disttype=ratio -a timectrl -t default:K=1 --db PZ_Master1
        python -m ibeis --tf learn_featscore_normalizer --show --disttype=lnbnn -a timectrl -t default:K=1 --db PZ_Master1

        # LOOK AT THIS
        python -m ibeis --tf learn_featscore_normalizer --show --disttype=normdist -a timectrl -t default:K=1 --db PZ_Master1
        #python -m ibeis --tf learn_featscore_normalizer --show --disttype=parzen -a timectrl -t default:K=1 --db PZ_Master1
        #python -m ibeis --tf learn_featscore_normalizer --show --disttype=norm_parzen -a timectrl -t default:K=1 --db PZ_Master1

        python -m ibeis --tf learn_featscore_normalizer --show --disttype=lnbnn --db PZ_Master1 -a timectrl -t best

    Example:
        >>> # ENABLE_DOCTEST
        >>> from ibeis.algo.hots.scorenorm import *  # NOQA
        >>> import ibeis
        >>> learnkw = {}
        >>> datakw = NormFeatScoreConfig.from_argv_dict()
        >>> qreq_ = ibeis.testdata_qreq_(
        >>>     defaultdb='PZ_MTEST', a=['default'], p=['default'])
        >>> encoder = learn_featscore_normalizer(qreq_, datakw, learnkw)
        >>> ut.quit_if_noshow()
        >>> encoder.visualize(figtitle=encoder.get_cfgstr())
        >>> ut.show_if_requested()
    """
    # NOTE(review): mutable default arguments; they appear to be read-only
    # here (forwarded / stored), but confirm no caller relies on mutation.
    cm_list = qreq_.execute()
    print('learning scorenorm')
    print('datakw = %s' % ut.repr3(datakw))
    # Partition feature scores into true-positive / true-negative sets
    tp_scores, tn_scores, scorecfg = get_training_featscores(
        qreq_, cm_list, **datakw)
    _learnkw = dict(monotonize=True, adjust=2)
    _learnkw.update(learnkw)
    encoder = vt.ScoreNormalizer(**_learnkw)
    encoder.fit_partitioned(tp_scores, tn_scores, verbose=False)
    # ut.hashstr27(qreq_.get_cfgstr())
    # Maintain regen command info: TODO: generalize and integrate
    encoder._regen_info = {
        'cmd': 'python -m ibeis --tf learn_featscore_normalizer',
        'scorecfg': scorecfg,
        'learnkw': learnkw,
        'datakw': datakw,
        'qaids': qreq_.qaids,
        'daids': qreq_.daids,
        'qreq_cfg': qreq_.get_full_cfgstr(),
        'qreq_regen_info': getattr(qreq_, '_regen_info', {}),
    }
    # 'timestamp': ut.get_printable_timestamp(),
    # Sanitize the score config into a cfgstr-safe token: drop '()= ',
    # replace '+*<>[]' with underscores
    scorecfg_safe = scorecfg
    scorecfg_safe = re.sub('[' + re.escape('()= ') + ']', '', scorecfg_safe)
    scorecfg_safe = re.sub('[' + re.escape('+*<>[]') + ']', '_', scorecfg_safe)
    hashid = ut.hashstr27(ut.to_json(encoder._regen_info))
    naidinfo = ('q%s_d%s' % (len(qreq_.qaids), len(qreq_.daids)))
    cfgstr = 'featscore_{}_{}_{}_{}'.format(scorecfg_safe, qreq_.ibs.get_dbname(), naidinfo, hashid)
    encoder.cfgstr = cfgstr
    return encoder
def _write_dict_to_sqlite3(dict_):
    # Serialize a Python dict to JSON text for storage in an SQLite TEXT column.
    return ut.to_json(dict_)
def _sync_get_aids_for_uuids(ibs, uuids):
    """Resolve annotation UUIDs to rowids via the remote
    ``/api/annot/rowid/uuid/`` endpoint."""
    payload = {'uuid_list': ut.to_json(list(uuids))}
    return ibs._get_ibs('/api/annot/rowid/uuid/', data=payload)
def _write_dict_to_sqlite3(dict_):
    # Serialize a Python dict to JSON text for storage in an SQLite TEXT column.
    return ut.to_json(dict_)
def save_json(fpath, data, **kwargs):
    """Write *data* to *fpath* as JSON text (extra kwargs are forwarded to
    the serializer)."""
    import utool as ut
    text = ut.to_json(data, **kwargs)
    ut.save_text(fpath, text)
def translated_call(**kwargs):
    """
    Flask endpoint wrapper: merges query-string, form, and JSON-body inputs,
    dispatches to the wrapped API function, and renders the result either as
    a JSON envelope or (with ``__format__``) as human-readable HTML.
    """
    def html_newlines(text):
        # Convert plain text to HTML-ish output: preserve spacing and newlines
        r = '<br />\n'
        text = text.replace(' ', ' ')
        text = (
            text.replace('\r\n', r)
            .replace('\n\r', r)
            .replace('\r', r)
            .replace('\n', r)
        )
        return text

    __format__ = False  # Default __format__ value
    ignore_cookie_set = False
    try:
        # logger.info('Processing: %r with args: %r and kwargs: %r' % (func, args, kwargs, ))
        # Pipe web input into Python web call
        kwargs2 = _process_input(flask.request.args)
        kwargs3 = _process_input(flask.request.form)
        try:
            # kwargs4 = _process_input(flask.request.get_json())
            kwargs4 = ut.from_json(flask.request.data)
        except Exception:
            # Body was not JSON; ignore it
            kwargs4 = {}
        # Later sources win: query string < form < JSON body
        kwargs.update(kwargs2)
        kwargs.update(kwargs3)
        kwargs.update(kwargs4)
        # Update the request object to include the final rectified inputs
        # for possible future reference
        flask.request.processed = ut.to_json(kwargs)
        jQuery_callback = None
        if 'callback' in kwargs and 'jQuery' in kwargs['callback']:
            # JSONP request: remember the callback, drop jQuery's cache-buster
            jQuery_callback = str(kwargs.pop('callback', None))
            kwargs.pop('_', None)
        # logger.info('KWARGS: %s' % (kwargs, ))
        # logger.info('COOKIES: %s' % (request.cookies, ))
        # __format__ can come from a cookie or be overridden per-request
        __format__ = request.cookies.get('__format__', None)
        __format__ = kwargs.pop('__format__', __format__)
        if __format__ is not None:
            __format__ = str(__format__).lower()
            # 'onetime'/'true' means: format now, but don't persist the cookie
            ignore_cookie_set = __format__ in ['onetime', 'true']
            __format__ = __format__ in ['true', 'enabled', 'enable']
        from wbia.web.app import PROMETHEUS
        if PROMETHEUS:
            # Count this API hit, excluding high-frequency status endpoints
            exclude_tag_list = [
                '/api/test/heartbeat/',
                '/v0.1/wildbook/status/',
                '/v0.1/vulcan/status/',
            ]
            tag = request.url_rule.rule
            if tag not in exclude_tag_list:
                ibs = flask.current_app.ibs
                ibs.prometheus_increment_api(tag)
        resp_tup = translate_wbia_webcall(func, **kwargs)
        rawreturn, success, code, message = resp_tup
    except WebException as webex:
        # Structured API error: carries its own code/message/payload
        # ut.printex(webex)
        logger.info('CAUGHT2: %r' % (webex,))
        rawreturn = webex.get_rawreturn(
            DEBUG_PYTHON_STACK_TRACE_JSON_RESPONSE
        )
        success = False
        code = webex.code
        message = webex.message
        jQuery_callback = None
    except Exception as ex:
        # Unexpected error: map to HTTP 500
        logger.info('CAUGHT2: %r' % (ex,))
        # ut.printex(ex)
        rawreturn = None
        if DEBUG_PYTHON_STACK_TRACE_JSON_RESPONSE:
            rawreturn = str(traceback.format_exc())
        success = False
        code = 500
        message = str(ex)
        # errmsg = str(ex)
        # message = 'API error, Python Exception thrown: %s' % (errmsg)
        if "'int' object is not iterable" in message:
            rawreturn = """
            HINT: the input for this call is most likely expected to be a list.
            Try adding a comma at the end of the input (to cast the conversion
            into a list) or encapsulate the input with [].
            """
        jQuery_callback = None
    # logger.info('RECEIVED FORMAT: %r' % (__format__, ))
    if __format__:
        # Hack for readable error messages
        webreturn = translate_wbia_webreturn(
            rawreturn, success, code, message, jQuery_callback
        )
        webreturn = ut.repr3(ut.from_json(webreturn), strvals=True)
        try:
            from ansi2html import Ansi2HTMLConverter
            conv = Ansi2HTMLConverter()
            webreturn = conv.convert(webreturn)
        except ImportError as ex:
            # ansi2html unavailable: fall back to a plain <samp> rendering
            ut.printex(ex, 'pip install ansi2html', iswarning=True)
            webreturn = ut.strip_ansi(webreturn)
            webreturn = (
                '<p><samp>\n' + html_newlines(webreturn) + '\n</samp></p>'
            )
        webreturn = (
            '<meta http-equiv="Content-Type" content="text/html;charset=ISO-8859-8">\n'
            + webreturn
        )

        def get_func_href(funcname):
            # Build a formatted-view link to another API endpoint
            url = (
                'http://'
                + request.environ['HTTP_HOST']
                + flask.url_for(funcname)
                + '?__format__=True'
            )
            return '<a href="{url}">{url}</a>'.format(url=url)

        if not success:
            webreturn += (
                '<pre>See logs for details: %s</pre>'
                % get_func_href('get_current_log_text')
            )
            webreturn += (
                '<pre>Might also look into db_info: %s</pre>'
                % get_func_href('get_dbinfo')
            )
    else:
        webreturn = translate_wbia_webreturn(
            rawreturn, success, code, message, jQuery_callback
        )
        webreturn = ut.strip_ansi(webreturn)
    resp = flask.make_response(webreturn, code)
    resp.status_code = code
    if not __format__:
        resp.headers['Content-Type'] = 'application/json; charset=utf-8'
        resp.headers['mimetype'] = 'application/json'
    if not ignore_cookie_set:
        # Persist (or clear) the formatting preference for future requests
        if __format__:
            resp.set_cookie('__format__', 'enabled')
        else:
            resp.set_cookie('__format__', '', expires=0)
    return resp
def get_pipe_cfgstr(self):
    # The pipeline's config string is the JSON serialization of its config.
    return ut.to_json(self.config)
def send_multipart_json(sock, idents, reply):
    """Send *reply* over *sock* as a UTF-8 JSON frame appended to the
    routing-identity frames."""
    encoded = ut.to_json(reply).encode('utf-8')
    sock.send_multipart(idents + [encoded])
def save_json(fpath, data, **kwargs):
    """Serialize *data* to JSON and save it at *fpath*; extra kwargs are
    passed through to ut.to_json."""
    import utool as ut
    serialized = ut.to_json(data, **kwargs)
    ut.save_text(fpath, serialized)