def sort_window_ids(winid_list, order='mru'):
    """Order the given window ids by the global window ordering.

    The global ordering comes from ``XCtrl.sorted_window_ids`` (by default
    most-recently-used first); the result is the subset of that ordering
    restricted to ``winid_list``.
    """
    import utool as ut
    global_order = XCtrl.sorted_window_ids(order)
    return ut.isect(global_order, winid_list)
def draw_twoday_count(ibs, visit_info_list_):
    """Draw a stacked match-graph visualization for a two-day sight/resight visit.

    Args:
        ibs: ibeis controller object (provides annotmatch / name lookups).
        visit_info_list_ (list): two dicts (one per day); each is expected to
            contain at least 'aids', 'unique_nids', 'grouped_aids', and
            'edges' keys.  NOTE(review): 'edges' is only populated by the
            disabled ``if False`` block below, so callers must supply it —
            TODO confirm against callers.

    Side effects:
        Lays out and displays the combined two-day graph via plottool/pyplot.
    """
    import copy
    visit_info_list = copy.deepcopy(visit_info_list_)

    # BUGFIX: ``debug`` was previously only assigned inside the dead
    # ``if False:`` block below, so the ``if debug:`` checks near the end of
    # this function raised NameError.  Bind it at function scope instead.
    debug = False

    # NOTE(review): aids_day1/aids_day2 are currently unused below; kept so
    # a missing 'aids' key still fails loudly here.
    aids_day1, aids_day2 = ut.take_column(visit_info_list_, 'aids')
    nids_day1, nids_day2 = ut.take_column(visit_info_list_, 'unique_nids')
    resight_nids = ut.isect(nids_day1, nids_day2)

    if False:
        # HACK REMOVE DATA TO MAKE THIS FASTER
        num = 20
        for info in visit_info_list:
            non_resight_nids = list(set(info['unique_nids']) - set(resight_nids))
            sample_nids2 = non_resight_nids[0:num] + resight_nids[:num]
            info['grouped_aids'] = ut.dict_subset(info['grouped_aids'], sample_nids2)
            info['unique_nids'] = sample_nids2

    # Build a graph of matches (disabled; when enabled this populates
    # info['edges'] from annotmatch rows plus MST links for unlinked annots)
    if False:
        for info in visit_info_list:
            edges = []
            grouped_aids = info['grouped_aids']
            aids_list = list(grouped_aids.values())
            ams_list = ibs.get_annotmatch_rowids_in_cliques(aids_list)
            aids1_list = ibs.unflat_map(ibs.get_annotmatch_aid1, ams_list)
            aids2_list = ibs.unflat_map(ibs.get_annotmatch_aid2, ams_list)
            for ams, aids, aids1, aids2 in zip(ams_list, aids_list,
                                               aids1_list, aids2_list):
                edge_nodes = set(aids1 + aids2)
                ##if len(edge_nodes) != len(set(aids)):
                #    #print('--')
                #    #print('aids = %r' % (aids,))
                #    #print('edge_nodes = %r' % (edge_nodes,))
                bad_aids = edge_nodes - set(aids)
                if len(bad_aids) > 0:
                    print('bad_aids = %r' % (bad_aids,))
                unlinked_aids = set(aids) - edge_nodes
                # chain the unlinked annots onto one linked node to keep the
                # name connected
                mst_links = list(ut.itertwo(
                    list(unlinked_aids) + list(edge_nodes)[:1]))
                bad_aids.add(None)
                user_links = [(u, v) for (u, v) in zip(aids1, aids2)
                              if u not in bad_aids and v not in bad_aids]
                new_edges = mst_links + user_links
                new_edges = [(int(u), int(v)) for u, v in new_edges
                             if u not in bad_aids and v not in bad_aids]
                edges += new_edges
            info['edges'] = edges

    # Add edges between days: only names seen on both days get cross-day links
    grouped_aids1, grouped_aids2 = ut.take_column(visit_info_list, 'grouped_aids')
    nids_day1, nids_day2 = ut.take_column(visit_info_list, 'unique_nids')
    resight_nids = ut.isect(nids_day1, nids_day2)

    resight_aids1 = ut.take(grouped_aids1, resight_nids)
    resight_aids2 = ut.take(grouped_aids2, resight_nids)
    #resight_aids3 = [list(aids1) + list(aids2) for aids1, aids2 in zip(resight_aids1, resight_aids2)]

    ams_list = ibs.get_annotmatch_rowids_between_groups(resight_aids1, resight_aids2)
    aids1_list = ibs.unflat_map(ibs.get_annotmatch_aid1, ams_list)
    aids2_list = ibs.unflat_map(ibs.get_annotmatch_aid2, ams_list)

    between_edges = []
    for ams, aids1, aids2, rawaids1, rawaids2 in zip(
            ams_list, aids1_list, aids2_list, resight_aids1, resight_aids2):
        link_aids = aids1 + aids2
        rawaids3 = rawaids1 + rawaids2
        badaids = ut.setdiff(link_aids, rawaids3)
        assert not badaids
        user_links = [(int(u), int(v)) for (u, v) in zip(aids1, aids2)
                      if u is not None and v is not None]
        # HACK THIS OFF: always use the single fallback edge below
        user_links = []
        if len(user_links) == 0:
            # Hack in an edge between the first annot of each day
            between_edges += [(rawaids1[0], rawaids2[0])]
        else:
            between_edges += user_links

    # sanity check: every cross-day edge connects annots of the same name
    assert np.all(0 == np.diff(np.array(
        ibs.unflat_map(ibs.get_annot_nids, between_edges)), axis=1))

    import plottool_ibeis as pt
    import networkx as nx
    #pt.qt4ensure()
    #len(list(nx.connected_components(graph1)))
    #print(ut.graph_info(graph1))

    # Layout graph
    layoutkw = dict(
        prog='neato',
        draw_implicit=False,
        splines='line',
        #splines='curved',
        #splines='spline',
        #sep=10 / 72,
        #prog='dot', rankdir='TB',
    )

    def translate_graph_to_origin(graph):
        # shift node positions so the bounding box starts at (0, 0)
        x, y, w, h = ut.get_graph_bounding_box(graph)
        ut.translate_graph(graph, (-x, -y))

    def stack_graphs(graph_list, vert=False, pad=None):
        # Compose several laid-out graphs into one, placed side by side
        # (or stacked vertically), centered along the other axis.
        graph_list_ = [g.copy() for g in graph_list]
        for g in graph_list_:
            translate_graph_to_origin(g)
        bbox_list = [ut.get_graph_bounding_box(g) for g in graph_list_]
        if vert:
            dim1 = 3  # stack along height
            dim2 = 2
        else:
            dim1 = 2  # stack along width
            dim2 = 3
        dim1_list = np.array([bbox[dim1] for bbox in bbox_list])
        dim2_list = np.array([bbox[dim2] for bbox in bbox_list])
        if pad is None:
            pad = np.mean(dim1_list) / 2
        offset1_list = ut.cumsum([0] + [d + pad for d in dim1_list[:-1]])
        max_dim2 = max(dim2_list)
        offset2_list = [(max_dim2 - d2) / 2 for d2 in dim2_list]
        if vert:
            t_xy_list = [(d2, d1) for d1, d2 in zip(offset1_list, offset2_list)]
        else:
            t_xy_list = [(d1, d2) for d1, d2 in zip(offset1_list, offset2_list)]
        for g, t_xy in zip(graph_list_, t_xy_list):
            ut.translate_graph(g, t_xy)
            # pin so a later agraph layout keeps these positions
            nx.set_node_attributes(g, name='pin', values='true')
        new_graph = nx.compose_all(graph_list_)
        #pt.show_nx(new_graph, layout='custom', node_labels=False, as_directed=False)  # NOQA
        return new_graph

    # Construct and lay out one graph per day
    for count, info in enumerate(visit_info_list):
        graph = nx.Graph()
        edges = [(int(u), int(v)) for u, v in info['edges']
                 if u is not None and v is not None]
        graph.add_edges_from(edges, attr_dict={'zorder': 10})
        nx.set_node_attributes(graph, name='zorder', values=20)

        # Layout in neato
        _ = pt.nx_agraph_layout(graph, inplace=True, **layoutkw)  # NOQA

        # Extract components and then flatten in nid ordering
        ccs = list(nx.connected_components(graph))
        root_aids = []
        cc_graphs = []
        for cc_nodes in ccs:
            cc = graph.subgraph(cc_nodes)
            try:
                root_aids.append(list(ut.nx_source_nodes(cc.to_directed()))[0])
            except nx.NetworkXUnfeasible:
                # cyclic component: fall back to an arbitrary node
                root_aids.append(list(cc.nodes())[0])
            cc_graphs.append(cc)

        root_nids = ibs.get_annot_nids(root_aids)
        nid2_graph = dict(zip(root_nids, cc_graphs))

        resight_nids_ = set(resight_nids).intersection(set(root_nids))
        noresight_nids_ = set(root_nids) - resight_nids_

        n_graph_list = ut.take(nid2_graph, sorted(noresight_nids_))
        r_graph_list = ut.take(nid2_graph, sorted(resight_nids_))

        if len(n_graph_list) > 0:
            n_graph = nx.compose_all(n_graph_list)
            _ = pt.nx_agraph_layout(n_graph, inplace=True, **layoutkw)  # NOQA
            n_graphs = [n_graph]
        else:
            n_graphs = []

        r_graphs = [stack_graphs(chunk) for chunk in ut.ichunks(r_graph_list, 100)]
        if count == 0:
            new_graph = stack_graphs(n_graphs + r_graphs, vert=True)
        else:
            # mirror the ordering on day 2 so resighted names face each other
            new_graph = stack_graphs(r_graphs[::-1] + n_graphs, vert=True)

        #pt.show_nx(new_graph, layout='custom', node_labels=False, as_directed=False)  # NOQA
        info['graph'] = new_graph

    graph1_, graph2_ = ut.take_column(visit_info_list, 'graph')
    if False:
        _ = pt.show_nx(graph1_, layout='custom', node_labels=False, as_directed=False)  # NOQA
        _ = pt.show_nx(graph2_, layout='custom', node_labels=False, as_directed=False)  # NOQA

    graph_list = [graph1_, graph2_]
    twoday_graph = stack_graphs(graph_list, vert=True, pad=None)
    nx.set_node_attributes(twoday_graph, name='pin', values='true')

    if debug:
        ut.nx_delete_None_edge_attr(twoday_graph)
        ut.nx_delete_None_node_attr(twoday_graph)
        print('twoday_graph(pre) info' + ut.repr3(ut.graph_info(twoday_graph), nl=2))

    # Hack, no idea why there are nodes that dont exist here
    between_edges_ = [edge for edge in between_edges
                      if twoday_graph.has_node(edge[0]) and
                      twoday_graph.has_node(edge[1])]

    twoday_graph.add_edges_from(between_edges_, attr_dict={'alpha': .2, 'zorder': 0})
    ut.nx_ensure_agraph_color(twoday_graph)

    layoutkw['splines'] = 'line'
    layoutkw['prog'] = 'neato'
    agraph = pt.nx_agraph_layout(twoday_graph, inplace=True,
                                 return_agraph=True, **layoutkw)[-1]  # NOQA
    if False:
        fpath = ut.truepath('~/ggr_graph.png')
        agraph.draw(fpath)
        ut.startfile(fpath)

    if debug:
        print('twoday_graph(post) info' + ut.repr3(ut.graph_info(twoday_graph)))

    _ = pt.show_nx(twoday_graph, layout='custom', node_labels=False, as_directed=False)  # NOQA
def estimate_twoday_count(ibs, day1, day2, filter_kw):
    """Estimate population size from a two-day sight/resight survey.

    Groups all images by calendar date, filters the annotations seen on
    ``day1`` and ``day2``, and applies the Lincoln-Petersen style estimator
    in ``dbinfo.sight_resight_count``.

    Args:
        ibs: ibeis controller object.
        day1, day2: date keys (must match ``datetime.date()`` of the images)
            selecting the two visit days.
        filter_kw (dict): forwarded to ``ibs.filter_annots_general``.

    Returns:
        tuple: (nsight1, nsight2, resight, lp_index, lp_error) where
        ``nsight1``/``nsight2`` are the unique name counts per day,
        ``resight`` is the number of names seen on both days, and
        ``lp_index``/``lp_error`` come from the sight-resight estimator.
    """
    #gid_list = ibs.get_valid_gids()
    all_images = ibs.images()
    # Bucket every image in the database by its calendar date
    dates = [dt.date() for dt in all_images.datetime]
    date_to_images = all_images.group_items(dates)
    date_to_images = ut.sort_dict(date_to_images)
    #date_hist = ut.map_dict_vals(len, date2_gids)
    #print('date_hist = %s' % (ut.repr2(date_hist, nl=2),))
    verbose = 0
    visit_dates = [day1, day2]
    visit_info_list_ = []
    for day in visit_dates:
        # Raises KeyError if no images exist for the requested day
        images = date_to_images[day]
        aids = ut.flatten(images.aids)
        aids = ibs.filter_annots_general(aids, filter_kw=filter_kw,
                                         verbose=verbose)
        nids = ibs.get_annot_name_rowids(aids)
        grouped_aids = ut.group_items(aids, nids)
        unique_nids = ut.unique(list(grouped_aids.keys()))

        if False:
            # Disabled: drop names whose annots were all taken within a
            # 5-second burst ("five second rule")
            aids_list = ut.take(grouped_aids, unique_nids)
            for aids in aids_list:
                if len(aids) > 30:
                    break
            timedeltas_list = ibs.get_unflat_annots_timedelta_list(aids_list)
            # Do the five second rule
            marked_thresh = 5
            flags = []
            for nid, timedeltas in zip(unique_nids, timedeltas_list):
                flags.append(timedeltas.max() > marked_thresh)
            print('Unmarking %d names' % (len(flags) - sum(flags)))
            unique_nids = ut.compress(unique_nids, flags)
            grouped_aids = ut.dict_subset(grouped_aids, unique_nids)

        unique_aids = ut.flatten(list(grouped_aids.values()))
        info = {
            'unique_nids': unique_nids,
            'grouped_aids': grouped_aids,
            'unique_aids': unique_aids,
        }
        visit_info_list_.append(info)

    # Estimate statistics
    from ibeis.other import dbinfo
    aids_day1, aids_day2 = ut.take_column(visit_info_list_, 'unique_aids')
    nids_day1, nids_day2 = ut.take_column(visit_info_list_, 'unique_nids')
    # Names present on both days count as resights
    resight_nids = ut.isect(nids_day1, nids_day2)
    nsight1 = len(nids_day1)
    nsight2 = len(nids_day2)
    resight = len(resight_nids)
    lp_index, lp_error = dbinfo.sight_resight_count(nsight1, nsight2, resight)
    if False:
        from ibeis.other import dbinfo
        print('DAY 1 STATS:')
        _ = dbinfo.get_dbinfo(ibs, aid_list=aids_day1)  # NOQA
        print('DAY 2 STATS:')
        _ = dbinfo.get_dbinfo(ibs, aid_list=aids_day2)  # NOQA
        print('COMBINED STATS:')
        _ = dbinfo.get_dbinfo(ibs, aid_list=aids_day1 + aids_day2)  # NOQA

    print('%d annots on day 1' % (len(aids_day1)) )
    print('%d annots on day 2' % (len(aids_day2)) )
    print('%d names on day 1' % (nsight1,))
    print('%d names on day 2' % (nsight2,))
    print('resight = %r' % (resight,))
    print('lp_index = %r ± %r' % (lp_index, lp_error))
    return nsight1, nsight2, resight, lp_index, lp_error
def double_depcache_graph():
    r"""Draw the combined image+annot dependency-cache graph for testdb1.

    Composes the image depcache graph and the annot depcache graph into one
    figure (renaming nodes shared by both), injects a synthetic
    'User decision' node between detections and annotations, and renders
    the result with graphviz via plottool.

    NOTE(review): an identically named function appears again later in this
    module; the later definition shadows this one at import time — confirm
    which copy is intended to survive.

    CommandLine:
        python -m ibeis.scripts.specialdraw double_depcache_graph --show --testmode
        python -m ibeis.scripts.specialdraw double_depcache_graph --save=figures5/doubledepc.png --dpath ~/latex/cand/ --diskshow --figsize=8,20 --dpi=220 --testmode --show --clipwhite
        python -m ibeis.scripts.specialdraw double_depcache_graph --save=figures5/doubledepc.png --dpath ~/latex/cand/ --diskshow --figsize=8,20 --dpi=220 --testmode --show --clipwhite --arrow-width=.5
        python -m ibeis.scripts.specialdraw double_depcache_graph --save=figures5/doubledepc.png --dpath ~/latex/cand/ --diskshow --figsize=8,20 --dpi=220 --testmode --show --clipwhite --arrow-width=5

    Example:
        >>> # DISABLE_DOCTEST
        >>> from ibeis.scripts.specialdraw import *  # NOQA
        >>> result = double_depcache_graph()
        >>> print(result)
        >>> ut.quit_if_noshow()
        >>> import plottool as pt
        >>> ut.show_if_requested()
    """
    import ibeis
    import networkx as nx
    import plottool as pt
    pt.ensure_pylab_qt4()
    # pt.plt.xkcd()
    ibs = ibeis.opendb('testdb1')
    reduced = True
    implicit = True
    annot_graph = ibs.depc_annot.make_graph(reduced=reduced, implicit=implicit)
    image_graph = ibs.depc_image.make_graph(reduced=reduced, implicit=implicit)
    # Nodes present in both graphs get disambiguating prefixes before compose
    to_rename = ut.isect(image_graph.nodes(), annot_graph.nodes())
    nx.relabel_nodes(annot_graph, {x: 'annot_' + x for x in to_rename}, copy=False)
    nx.relabel_nodes(image_graph, {x: 'image_' + x for x in to_rename}, copy=False)
    graph = nx.compose_all([image_graph, annot_graph])
    #graph = nx.union_all([image_graph, annot_graph], rename=('image', 'annot'))
    # userdecision = ut.nx_makenode(graph, 'user decision', shape='rect', color=pt.DARK_YELLOW, style='diagonals')
    # userdecision = ut.nx_makenode(graph, 'user decision', shape='circle', color=pt.DARK_YELLOW)
    userdecision = ut.nx_makenode(
        graph, 'User decision', shape='rect',
        #width=100, height=100,
        color=pt.YELLOW, style='diagonals')
    #longcat = True
    longcat = False
    #edge = ('feat', 'neighbor_index')
    #data = graph.get_edge_data(*edge)[0]
    #print('data = %r' % (data,))
    #graph.remove_edge(*edge)
    ## hack
    #graph.add_edge('featweight', 'neighbor_index', **data)
    # Wire the synthetic user-decision node into the pipeline
    graph.add_edge('detections', userdecision, constraint=longcat, color=pt.PINK)
    graph.add_edge(userdecision, 'annotations', constraint=longcat, color=pt.PINK)
    # graph.add_edge(userdecision, 'annotations', implicit=True, color=[0, 0, 0])
    if not longcat:
        pass
        #graph.add_edge('images', 'annotations', style='invis')
        #graph.add_edge('thumbnails', 'annotations', style='invis')
        #graph.add_edge('thumbnails', userdecision, style='invis')
    # Prune nodes not wanted in the figure
    graph.remove_node('Has_Notch')
    graph.remove_node('annotmask')
    layoutkw = {
        'ranksep': 5,
        'nodesep': 5,
        'dpi': 96,
        # 'nodesep': 1,
    }
    ns = 1000
    ut.nx_set_default_node_attributes(graph, 'fontsize', 72)
    ut.nx_set_default_node_attributes(graph, 'fontname', 'Ubuntu')
    ut.nx_set_default_node_attributes(graph, 'style',  'filled')
    ut.nx_set_default_node_attributes(graph, 'width', ns * ut.PHI)
    ut.nx_set_default_node_attributes(graph, 'height', ns * (1 / ut.PHI))

    # Label multi-input edges with their local input id (networkx 1.x
    # adjacency-dict API: graph.edge[u][v][k] -> data dict)
    #for u, v, d in graph.edge(data=True):
    for u, vkd in graph.edge.items():
        for v, dk in vkd.items():
            for k, d in dk.items():
                localid = d.get('local_input_id')
                if localid:
                    # d['headlabel'] = localid
                    if localid not in ['1']:
                        d['taillabel'] = localid
                    #d['label'] = localid
                if d.get('taillabel') in {'1'}:
                    del d['taillabel']

    # NOTE(review): this first alias dict is dead code — it is immediately
    # replaced by the second assignment below.
    node_alias = {
        'chips': 'Chip',
        'images': 'Image',
        'feat': 'Feat',
        'featweight': 'Feat Weights',
        'thumbnails': 'Thumbnail',
        'detections': 'Detections',
        'annotations': 'Annotation',
        'Notch_Tips': 'Notch Tips',
        'probchip': 'Prob Chip',
        'Cropped_Chips': 'Croped Chip',
        'Trailing_Edge': 'Trailing\nEdge',
        'Block_Curvature': 'Block\nCurvature',
        # 'BC_DTW': 'block curvature /\n dynamic time warp',
        'BC_DTW': 'DTW Distance',
        'vsone': 'Hots vsone',
        'feat_neighbs': 'Nearest\nNeighbors',
        'neighbor_index': 'Neighbor\nIndex',
        'vsmany': 'Hots vsmany',
        'annot_labeler': 'Annot Labeler',
        'labeler': 'Labeler',
        'localizations': 'Localizations',
        'classifier': 'Classifier',
        'sver': 'Spatial\nVerification',
        'Classifier': 'Existence',
        'image_labeler': 'Image Labeler',
    }
    node_alias = {
        'Classifier': 'existence',
        'feat_neighbs': 'neighbors',
        'sver': 'spatial_verification',
        'Cropped_Chips': 'cropped_chip',
        'BC_DTW': 'dtw_distance',
        'Block_Curvature': 'curvature',
        'Trailing_Edge': 'trailing_edge',
        'Notch_Tips': 'notch_tips',
        'thumbnails': 'thumbnail',
        'images': 'image',
        'annotations': 'annotation',
        'chips': 'chip',
        #userdecision: 'User de'
    }
    # Only rename nodes that actually exist in the composed graph
    node_alias = ut.delete_dict_keys(
        node_alias, ut.setdiff(node_alias.keys(), graph.nodes()))
    nx.relabel_nodes(graph, node_alias, copy=False)
    fontkw = dict(fontname='Ubuntu', fontweight='normal', fontsize=12)
    #pt.gca().set_aspect('equal')
    #pt.figure()
    pt.show_nx(graph, layoutkw=layoutkw, fontkw=fontkw)
    pt.zoom_factory()
def initialize_graph_and_model(infr):
    """Build a K-broken match graph from chip-match scores and attach an
    inference model to ``infr``.

    Takes the top-scoring database matches from each chip-match in
    ``infr.cm_list``, symmetrizes duplicate directed edges by averaging
    their weights, decorates the graph with name labels and visualization
    attributes, and stores a ``graph_iden.InfrModel`` on ``infr.model``.

    Unused in internal split stuff

    pt.qt4ensure()
    layout_info = pt.show_nx(graph, as_directed=False, fnum=1,
                             layoutkw=dict(prog='neato'), use_image=True,
                             verbose=0)
    ax = pt.gca()
    pt.zoom_factory()
    pt.interactions.PanEvents()
    """
    #import networkx as nx
    #import itertools
    cm_list = infr.cm_list
    hack = True
    hack = False
    if hack:
        cm_list = cm_list[:10]
    qaid_list = [cm.qaid for cm in cm_list]
    daids_list = [cm.daid_list for cm in cm_list]
    unique_aids = sorted(ut.list_union(*daids_list + [qaid_list]))
    if hack:
        unique_aids = sorted(ut.isect(unique_aids, qaid_list))
    # map annot id -> contiguous graph-node index
    aid2_aidx = ut.make_index_lookup(unique_aids)

    # Construct K-broken graph: keep only the `top` best-scoring matches
    # per query, and only edges between annots that are themselves queries
    edges = []
    edge_weights = []
    #top = (infr.qreq_.qparams.K + 1) * 2
    #top = (infr.qreq_.qparams.K) * 2
    top = (infr.qreq_.qparams.K + 2)
    for count, cm in enumerate(cm_list):
        qidx = aid2_aidx[cm.qaid]
        score_list = cm.annot_score_list
        sortx = ut.argsort(score_list)[::-1]
        score_list = ut.take(score_list, sortx)[:top]
        daid_list = ut.take(cm.daid_list, sortx)[:top]
        for score, daid in zip(score_list, daid_list):
            if daid not in qaid_list:
                continue
            didx = aid2_aidx[daid]
            edge_weights.append(score)
            edges.append((qidx, didx))

    # make symmetric
    directed_edges = dict(zip(edges, edge_weights))
    # Find edges that point in both directions and average their weights
    undirected_edges = {}
    for (u, v), w in directed_edges.items():
        if (v, u) in undirected_edges:
            undirected_edges[(v, u)] += w
            undirected_edges[(v, u)] /= 2
        else:
            undirected_edges[(u, v)] = w

    edges = list(undirected_edges.keys())
    edge_weights = list(undirected_edges.values())
    nodes = list(range(len(unique_aids)))

    # rebase name ids onto a dense 0..k labeling
    nid_labeling = infr.qreq_.ibs.get_annot_nids(unique_aids)
    labeling = ut.rebase_labels(nid_labeling)

    import networkx as nx
    from ibeis.viz import viz_graph
    set_node_attrs = nx.set_node_attributes
    set_edge_attrs = nx.set_edge_attributes

    # Create match-based graph structure
    graph = nx.DiGraph()
    graph.add_nodes_from(nodes)
    graph.add_edges_from(edges)

    # Important properties
    # NOTE(review): recomputes the same labeling as nid_labeling above
    nid_list = infr.qreq_.ibs.get_annot_nids(unique_aids)
    labeling = ut.rebase_labels(nid_list)

    set_node_attrs(graph, 'name_label', dict(zip(nodes, labeling)))
    set_edge_attrs(graph, 'weight', dict(zip(edges, edge_weights)))

    # Visualization properties
    import plottool as pt
    ax2_aid = ut.invert_dict(aid2_aidx)
    set_node_attrs(graph, 'aid', ax2_aid)
    viz_graph.ensure_node_images(infr.qreq_.ibs, graph)
    set_node_attrs(graph, 'framewidth', dict(zip(nodes, [3.0] * len(nodes))))
    set_node_attrs(graph, 'framecolor', dict(zip(nodes, [pt.DARK_BLUE] * len(nodes))))
    ut.color_nodes(graph, labelattr='name_label')

    edge_colors = pt.scores_to_color(np.array(edge_weights), cmap_='viridis')
    #import utool
    #utool.embed()
    #edge_colors = [pt.color_funcs.ensure_base255(color) for color in edge_colors]
    #print('edge_colors = %r' % (edge_colors,))
    set_edge_attrs(graph, 'color', dict(zip(edges, edge_colors)))

    # Build inference model
    from ibeis.algo.hots import graph_iden
    #graph_iden.rrr()
    model = graph_iden.InfrModel(graph)
    #model = graph_iden.InfrModel(len(nodes), edges, edge_weights, labeling=labeling)
    infr.model = model
def get_injured_sharks():
    """Scrape injury-tagged whale-shark images from whaleshark.org and ingest
    them into a local 'WS_Injury' wbia database.

    Workflow: query the site's keyword index, keep injury-related keywords,
    download each keyword's image list, bucket keywords into super-categories,
    download the images, deduplicate by file content, build a ColumnLists
    table, and add the images to the database.  Ends with interactive
    HOG-visualization code.

    NOTE(review): performs network I/O, disk writes, and opens GUI windows;
    intended as an exploratory script, not a pure function.

    >>> from wbia.scripts.getshark import *  # NOQA
    """
    import requests

    url = 'http://www.whaleshark.org/getKeywordImages.jsp'
    resp = requests.get(url)
    assert resp.status_code == 200
    keywords = resp.json()['keywords']
    key_list = ut.take_column(keywords, 'indexName')
    key_to_nice = {k['indexName']: k['readableName'] for k in keywords}

    # Keep only keywords whose name suggests an injury
    injury_patterns = [
        'injury', 'net', 'hook', 'trunc', 'damage', 'scar', 'nicks', 'bite',
    ]
    injury_keys = [
        key for key in key_list if any([pat in key for pat in injury_patterns])
    ]
    noninjury_keys = ut.setdiff(key_list, injury_keys)
    injury_nice = ut.lmap(lambda k: key_to_nice[k], injury_keys)  # NOQA
    noninjury_nice = ut.lmap(lambda k: key_to_nice[k], noninjury_keys)  # NOQA
    key_list = injury_keys

    # Fetch the image list for each injury keyword
    keyed_images = {}
    for key in ut.ProgIter(key_list, lbl='reading index', bs=True):
        key_url = url + '?indexName={indexName}'.format(indexName=key)
        key_resp = requests.get(key_url)
        assert key_resp.status_code == 200
        key_imgs = key_resp.json()['images']
        keyed_images[key] = key_imgs

    key_hist = {key: len(imgs) for key, imgs in keyed_images.items()}
    key_hist = ut.sort_dict(key_hist, 'vals')
    logger.info(ut.repr3(key_hist))
    nice_key_hist = ut.map_dict_keys(lambda k: key_to_nice[k], key_hist)
    nice_key_hist = ut.sort_dict(nice_key_hist, 'vals')
    logger.info(ut.repr3(nice_key_hist))

    # Count how many image urls each pair of keywords shares
    key_to_urls = {
        key: ut.take_column(vals, 'url') for key, vals in keyed_images.items()
    }
    overlaps = {}
    import itertools

    overlap_img_list = []
    for k1, k2 in itertools.combinations(key_to_urls.keys(), 2):
        overlap_imgs = ut.isect(key_to_urls[k1], key_to_urls[k2])
        num_overlap = len(overlap_imgs)
        overlaps[(k1, k2)] = num_overlap
        overlaps[(k1, k1)] = len(key_to_urls[k1])
        if num_overlap > 0:
            # logger.info('[%s][%s], overlap=%r' % (k1, k2, num_overlap))
            overlap_img_list.extend(overlap_imgs)

    all_img_urls = list(set(ut.flatten(key_to_urls.values())))
    num_all = len(all_img_urls)  # NOQA
    logger.info('num_all = %r' % (num_all, ))

    # Determine super-categories
    categories = ['nicks', 'scar', 'trunc']

    # Force these keys into these categories
    key_to_cat = {'scarbite': 'other_injury'}

    cat_to_keys = ut.ddict(list)

    for key in key_to_urls.keys():
        flag = 1
        if key in key_to_cat:
            cat = key_to_cat[key]
            cat_to_keys[cat].append(key)
            continue
        for cat in categories:
            if cat in key:
                cat_to_keys[cat].append(key)
                flag = 0
        if flag:
            # keyword matched no category substring
            cat = 'other_injury'
            cat_to_keys[cat].append(key)

    cat_urls = ut.ddict(list)
    for cat, keys in cat_to_keys.items():
        for key in keys:
            cat_urls[cat].extend(key_to_urls[key])

    cat_hist = {}
    for cat in list(cat_urls.keys()):
        cat_urls[cat] = list(set(cat_urls[cat]))
        cat_hist[cat] = len(cat_urls[cat])

    logger.info(ut.repr3(cat_to_keys))
    logger.info(ut.repr3(cat_hist))

    # invert: keyword -> category
    key_to_cat = dict([(val, key) for key, vals in cat_to_keys.items() for val in vals])

    # ingestset = {
    #     '__class__': 'ImageSet',
    #     'images': ut.ddict(dict)
    # }
    # for key, key_imgs in keyed_images.items():
    #     for imgdict in key_imgs:
    #         url = imgdict['url']
    #         encid = imgdict['correspondingEncounterNumber']
    #         # Make structure
    #         encdict = encounters[encid]
    #         encdict['__class__'] = 'Encounter'
    #         imgdict = ut.delete_keys(imgdict.copy(), ['correspondingEncounterNumber'])
    #         imgdict['__class__'] = 'Image'
    #         cat = key_to_cat[key]
    #         annotdict = {'relative_bbox': [.01, .01, .98, .98], 'tags': [cat, key]}
    #         annotdict['__class__'] = 'Annotation'
    #         # Ensure structures exist
    #         encdict['images'] = encdict.get('images', [])
    #         imgdict['annots'] = imgdict.get('annots', [])
    #         # Add an image to this encounter
    #         encdict['images'].append(imgdict)
    #         # Add an annotation to this image
    #         imgdict['annots'].append(annotdict)
    # # http://springbreak.wildbook.org/rest/org.ecocean.Encounter/1111
    # get_enc_url = 'http://www.whaleshark.org/rest/org.ecocean.Encounter/%s' % (encid,)
    # resp = requests.get(get_enc_url)
    # logger.info(ut.repr3(encdict))
    # logger.info(ut.repr3(encounters))

    # Download the files to the local disk
    # fpath_list =
    all_urls = ut.unique(
        ut.take_column(
            ut.flatten(
                ut.dict_subset(keyed_images,
                               ut.flatten(cat_to_keys.values())).values()),
            'url',
        ))
    dldir = ut.truepath('~/tmpsharks')
    from os.path import commonprefix, basename  # NOQA

    # Derive flat local filenames from the unique url suffixes
    prefix = commonprefix(all_urls)
    suffix_list = [url_[len(prefix):] for url_ in all_urls]
    fname_list = [suffix.replace('/', '--') for suffix in suffix_list]

    fpath_list = []
    for url, fname in ut.ProgIter(zip(all_urls, fname_list),
                                  lbl='downloading imgs', freq=1):
        fpath = ut.grab_file_url(url, download_dir=dldir, fname=fname,
                                 verbose=False)
        fpath_list.append(fpath)

    # Make sure we keep orig info
    # url_to_keys = ut.ddict(list)
    # accumulate every metadata value (and keyword) seen for each url
    url_to_info = ut.ddict(dict)
    for key, imgdict_list in keyed_images.items():
        for imgdict in imgdict_list:
            url = imgdict['url']
            info = url_to_info[url]
            for k, v in imgdict.items():
                info[k] = info.get(k, [])
                info[k].append(v)
            info['keys'] = info.get('keys', [])
            info['keys'].append(key)
            # url_to_keys[url].append(key)

    info_list = ut.take(url_to_info, all_urls)
    for info in info_list:
        if len(set(info['correspondingEncounterNumber'])) > 1:
            assert False, 'url with two different encounter nums'

    # Combine duplicate tags: group files whose contents hash identically
    hashid_list = [
        ut.get_file_uuid(fpath_, stride=8)
        for fpath_ in ut.ProgIter(fpath_list, bs=True)
    ]
    groupxs = ut.group_indices(hashid_list)[1]

    # Group properties by duplicate images
    # groupxs = [g for g in groupxs if len(g) > 1]
    fpath_list_ = ut.take_column(ut.apply_grouping(fpath_list, groupxs), 0)
    url_list_ = ut.take_column(ut.apply_grouping(all_urls, groupxs), 0)
    info_list_ = [
        ut.map_dict_vals(ut.flatten, ut.dict_accum(*info_))
        for info_ in ut.apply_grouping(info_list, groupxs)
    ]

    encid_list_ = [
        ut.unique(info_['correspondingEncounterNumber'])[0]
        for info_ in info_list_
    ]
    keys_list_ = [ut.unique(info_['keys']) for info_ in info_list_]
    cats_list_ = [ut.unique(ut.take(key_to_cat, keys)) for keys in keys_list_]

    clist = ut.ColumnLists({
        'gpath': fpath_list_,
        'url': url_list_,
        'encid': encid_list_,
        'key': keys_list_,
        'cat': cats_list_,
    })

    # for info_ in ut.apply_grouping(info_list, groupxs):
    #     info = ut.dict_accum(*info_)
    #     info = ut.map_dict_vals(ut.flatten, info)
    #     x = ut.unique(ut.flatten(ut.dict_accum(*info_)['correspondingEncounterNumber']))
    #     if len(x) > 1:
    #         info = info.copy()
    #         del info['keys']
    #         logger.info(ut.repr3(info))

    # Drop downloads that are not actually image files
    flags = ut.lmap(ut.fpath_has_imgext, clist['gpath'])
    clist = clist.compress(flags)

    import wbia

    ibs = wbia.opendb('WS_Injury', allow_newdir=True)
    gid_list = ibs.add_images(clist['gpath'])
    clist['gid'] = gid_list
    failed_flags = ut.flag_None_items(clist['gid'])
    logger.info('# failed %s' % (sum(failed_flags), ))
    passed_flags = ut.not_list(failed_flags)
    clist = clist.compress(passed_flags)
    ut.assert_all_not_None(clist['gid'])
    # ibs.get_image_uris_original(clist['gid'])
    # record the source urls as the original image uris
    ibs.set_image_uris_original(clist['gid'], clist['url'], overwrite=True)

    # ut.zipflat(clist['cat'], clist['key'])
    if False:
        # Can run detection instead
        clist['tags'] = ut.zipflat(clist['cat'])
        aid_list = ibs.use_images_as_annotations(clist['gid'],
                                                 adjust_percent=0.01,
                                                 tags_list=clist['tags'])
        aid_list

    import wbia.plottool as pt
    from wbia import core_annots

    pt.qt4ensure()
    # annots = ibs.annots()
    # aids = [1, 2]
    # ibs.depc_annot.get('hog', aids , 'hog')
    # ibs.depc_annot.get('chip', aids, 'img')
    # Interactively page through HOG visualizations of each annot
    for aid in ut.InteractiveIter(ibs.get_valid_aids()):
        hogs = ibs.depc_annot.d.get_hog_hog([aid])
        chips = ibs.depc_annot.d.get_chips_img([aid])
        chip = chips[0]
        hogimg = core_annots.make_hog_block_image(hogs[0])
        pt.clf()
        pt.imshow(hogimg, pnum=(1, 2, 1))
        pt.imshow(chip, pnum=(1, 2, 2))
        fig = pt.gcf()
        fig.show()
        fig.canvas.draw()

    # logger.info(len(groupxs))
    # if False:
    #     groupxs = ut.find_duplicate_items(ut.lmap(basename, suffix_list)).values()
    #     logger.info(ut.repr3(ut.apply_grouping(all_urls, groupxs)))
    #     # FIX
    #     for fpath, fname in zip(fpath_list, fname_list):
    #         if ut.checkpath(fpath):
    #             ut.move(fpath, join(dirname(fpath), fname))
    #             logger.info('fpath = %r' % (fpath,))
    # import wbia
    # from wbia.dbio import ingest_dataset
    # dbdir = wbia.sysres.lookup_dbdir('WS_ALL')
    # self = ingest_dataset.Ingestable2(dbdir)

    if False:
        # Show overlap matrix
        import wbia.plottool as pt
        import pandas as pd
        import numpy as np

        dict_ = overlaps
        s = pd.Series(dict_, index=pd.MultiIndex.from_tuples(overlaps))
        df = s.unstack()
        lhs, rhs = df.align(df.T)
        # symmetrize the pairwise overlap counts
        df = lhs.add(rhs, fill_value=0).fillna(0)

        label_texts = df.columns.values

        def label_ticks(label_texts):
            # decorate both axes with rotated keyword labels
            import wbia.plottool as pt

            truncated_labels = [repr(lbl[0:100]) for lbl in label_texts]
            ax = pt.gca()
            ax.set_xticks(list(range(len(label_texts))))
            ax.set_xticklabels(truncated_labels)
            [lbl.set_rotation(-55) for lbl in ax.get_xticklabels()]
            [
                lbl.set_horizontalalignment('left')
                for lbl in ax.get_xticklabels()
            ]

            # xgrid, ygrid = np.meshgrid(range(len(label_texts)), range(len(label_texts)))
            # pt.plot_surface3d(xgrid, ygrid, disjoint_mat)
            ax.set_yticks(list(range(len(label_texts))))
            ax.set_yticklabels(truncated_labels)
            [
                lbl.set_horizontalalignment('right')
                for lbl in ax.get_yticklabels()
            ]
            [
                lbl.set_verticalalignment('center')
                for lbl in ax.get_yticklabels()
            ]
            # [lbl.set_rotation(20) for lbl in ax.get_yticklabels()]

        # df = df.sort(axis=0)
        # df = df.sort(axis=1)
        sortx = np.argsort(df.sum(axis=1).values)[::-1]
        df = df.take(sortx, axis=0)
        df = df.take(sortx, axis=1)

        fig = pt.figure(fnum=1)
        fig.clf()
        mat = df.values.astype(np.int32)
        mat[np.diag_indices(len(mat))] = 0
        vmax = mat[(1 - np.eye(len(mat))).astype(np.bool)].max()
        import matplotlib.colors

        norm = matplotlib.colors.Normalize(vmin=0, vmax=vmax, clip=True)
        pt.plt.imshow(mat, cmap='hot', norm=norm, interpolation='none')
        pt.plt.colorbar()
        pt.plt.grid('off')
        label_ticks(label_texts)
        fig.tight_layout()

    # overlap_df = pd.DataFrame.from_dict(overlap_img_list)
    class TmpImage(ut.NiceRepr):
        pass

    from skimage.feature import hog
    from skimage import data, color, exposure
    import wbia.plottool as pt

    image2 = color.rgb2gray(data.astronaut())  # NOQA
    fpath = './GOPR1120.JPG'

    import vtool as vt

    for fpath in [fpath]:
        """
        http://scikit-image.org/docs/dev/auto_examples/plot_hog.html
        """
        image = vt.imread(fpath, grayscale=True)
        image = pt.color_funcs.to_base01(image)
        fig = pt.figure(fnum=2)
        fd, hog_image = hog(
            image,
            orientations=8,
            pixels_per_cell=(16, 16),
            cells_per_block=(1, 1),
            visualise=True,
        )
        fig, (ax1, ax2) = pt.plt.subplots(1, 2, figsize=(8, 4),
                                          sharex=True, sharey=True)
        ax1.axis('off')
        ax1.imshow(image, cmap=pt.plt.cm.gray)
        ax1.set_title('Input image')
        ax1.set_adjustable('box-forced')

        # Rescale histogram for better display
        hog_image_rescaled = exposure.rescale_intensity(hog_image,
                                                        in_range=(0, 0.02))
        ax2.axis('off')
        ax2.imshow(hog_image_rescaled, cmap=pt.plt.cm.gray)
        ax2.set_title('Histogram of Oriented Gradients')
        ax1.set_adjustable('box-forced')
        pt.plt.show()
def double_depcache_graph():
    r"""Draw the combined image+annot dependency-cache graph for testdb1.

    NOTE(review): this is a duplicate of an identically named function defined
    earlier in this module; being later, this definition is the one bound at
    import time.  Consider removing one copy.

    CommandLine:
        python -m ibeis.scripts.specialdraw double_depcache_graph --show --testmode
        python -m ibeis.scripts.specialdraw double_depcache_graph --save=figures5/doubledepc.png --dpath ~/latex/cand/ --diskshow --figsize=8,20 --dpi=220 --testmode --show --clipwhite
        python -m ibeis.scripts.specialdraw double_depcache_graph --save=figures5/doubledepc.png --dpath ~/latex/cand/ --diskshow --figsize=8,20 --dpi=220 --testmode --show --clipwhite --arrow-width=.5
        python -m ibeis.scripts.specialdraw double_depcache_graph --save=figures5/doubledepc.png --dpath ~/latex/cand/ --diskshow --figsize=8,20 --dpi=220 --testmode --show --clipwhite --arrow-width=5

    Example:
        >>> # DISABLE_DOCTEST
        >>> from ibeis.scripts.specialdraw import *  # NOQA
        >>> result = double_depcache_graph()
        >>> print(result)
        >>> ut.quit_if_noshow()
        >>> import plottool as pt
        >>> ut.show_if_requested()
    """
    import ibeis
    import networkx as nx
    import plottool as pt
    pt.ensure_pylab_qt4()
    # pt.plt.xkcd()
    ibs = ibeis.opendb('testdb1')
    reduced = True
    implicit = True
    annot_graph = ibs.depc_annot.make_graph(reduced=reduced, implicit=implicit)
    image_graph = ibs.depc_image.make_graph(reduced=reduced, implicit=implicit)
    # Prefix nodes shared by both graphs so compose_all keeps them distinct
    to_rename = ut.isect(image_graph.nodes(), annot_graph.nodes())
    nx.relabel_nodes(annot_graph, {x: 'annot_' + x for x in to_rename}, copy=False)
    nx.relabel_nodes(image_graph, {x: 'image_' + x for x in to_rename}, copy=False)
    graph = nx.compose_all([image_graph, annot_graph])
    #graph = nx.union_all([image_graph, annot_graph], rename=('image', 'annot'))
    # userdecision = ut.nx_makenode(graph, 'user decision', shape='rect', color=pt.DARK_YELLOW, style='diagonals')
    # userdecision = ut.nx_makenode(graph, 'user decision', shape='circle', color=pt.DARK_YELLOW)
    userdecision = ut.nx_makenode(graph, 'User decision', shape='rect',
                                  #width=100, height=100,
                                  color=pt.YELLOW, style='diagonals')
    #longcat = True
    longcat = False
    #edge = ('feat', 'neighbor_index')
    #data = graph.get_edge_data(*edge)[0]
    #print('data = %r' % (data,))
    #graph.remove_edge(*edge)
    ## hack
    #graph.add_edge('featweight', 'neighbor_index', **data)
    # Wire the synthetic user-decision node into the pipeline
    graph.add_edge('detections', userdecision, constraint=longcat, color=pt.PINK)
    graph.add_edge(userdecision, 'annotations', constraint=longcat, color=pt.PINK)
    # graph.add_edge(userdecision, 'annotations', implicit=True, color=[0, 0, 0])
    if not longcat:
        pass
        #graph.add_edge('images', 'annotations', style='invis')
        #graph.add_edge('thumbnails', 'annotations', style='invis')
        #graph.add_edge('thumbnails', userdecision, style='invis')
    # Prune nodes not wanted in the figure
    graph.remove_node('Has_Notch')
    graph.remove_node('annotmask')
    layoutkw = {
        'ranksep': 5,
        'nodesep': 5,
        'dpi': 96,
        # 'nodesep': 1,
    }
    ns = 1000
    ut.nx_set_default_node_attributes(graph, 'fontsize', 72)
    ut.nx_set_default_node_attributes(graph, 'fontname', 'Ubuntu')
    ut.nx_set_default_node_attributes(graph, 'style',  'filled')
    ut.nx_set_default_node_attributes(graph, 'width', ns * ut.PHI)
    ut.nx_set_default_node_attributes(graph, 'height', ns * (1 / ut.PHI))

    # Label multi-input edges with their local input id (networkx 1.x
    # adjacency-dict API: graph.edge[u][v][k] -> data dict)
    #for u, v, d in graph.edge(data=True):
    for u, vkd in graph.edge.items():
        for v, dk in vkd.items():
            for k, d in dk.items():
                localid = d.get('local_input_id')
                if localid:
                    # d['headlabel'] = localid
                    if localid not in ['1']:
                        d['taillabel'] = localid
                    #d['label'] = localid
                if d.get('taillabel') in {'1'}:
                    del d['taillabel']

    # NOTE(review): this first alias dict is dead code — it is immediately
    # replaced by the second assignment below.
    node_alias = {
        'chips': 'Chip',
        'images': 'Image',
        'feat': 'Feat',
        'featweight': 'Feat Weights',
        'thumbnails': 'Thumbnail',
        'detections': 'Detections',
        'annotations': 'Annotation',
        'Notch_Tips': 'Notch Tips',
        'probchip': 'Prob Chip',
        'Cropped_Chips': 'Croped Chip',
        'Trailing_Edge': 'Trailing\nEdge',
        'Block_Curvature': 'Block\nCurvature',
        # 'BC_DTW': 'block curvature /\n dynamic time warp',
        'BC_DTW': 'DTW Distance',
        'vsone': 'Hots vsone',
        'feat_neighbs': 'Nearest\nNeighbors',
        'neighbor_index': 'Neighbor\nIndex',
        'vsmany': 'Hots vsmany',
        'annot_labeler': 'Annot Labeler',
        'labeler': 'Labeler',
        'localizations': 'Localizations',
        'classifier': 'Classifier',
        'sver': 'Spatial\nVerification',
        'Classifier': 'Existence',
        'image_labeler': 'Image Labeler',
    }
    node_alias = {
        'Classifier': 'existence',
        'feat_neighbs': 'neighbors',
        'sver': 'spatial_verification',
        'Cropped_Chips': 'cropped_chip',
        'BC_DTW': 'dtw_distance',
        'Block_Curvature': 'curvature',
        'Trailing_Edge': 'trailing_edge',
        'Notch_Tips': 'notch_tips',
        'thumbnails': 'thumbnail',
        'images': 'image',
        'annotations': 'annotation',
        'chips': 'chip',
        #userdecision: 'User de'
    }
    # Only rename nodes that actually exist in the composed graph
    node_alias = ut.delete_dict_keys(node_alias,
                                     ut.setdiff(node_alias.keys(),
                                                graph.nodes()))
    nx.relabel_nodes(graph, node_alias, copy=False)
    fontkw = dict(fontname='Ubuntu', fontweight='normal', fontsize=12)
    #pt.gca().set_aspect('equal')
    #pt.figure()
    pt.show_nx(graph, layoutkw=layoutkw, fontkw=fontkw)
    pt.zoom_factory()