def getting_edge_weights(file_curr, out_dir_labels, out_dir, k, set_k=set_k_mul, normalize_k=normalize_k_mul):
    npy_files, anno_all = readTrainTestFile(file_curr)

    # per-class and global k x k co-occurrence accumulators
    k_count = np.zeros((len(class_names), k, k))
    k_count_big = np.zeros((k, k))

    for npy_file, anno_curr in zip(npy_files, anno_all):
        label_file = os.path.join(out_dir_labels, os.path.split(npy_file)[1])
        labels = np.load(label_file)
        k_count_big = set_k(k_count_big, labels)
        for gt_idx in np.where(anno_curr)[0]:
            k_count[gt_idx] = set_k(k_count[gt_idx], labels)

    k_count_big = normalize_k(k_count_big)
    print k_count_big.shape

    out_file = os.path.join(out_dir, 'all_classes_mul.npy')
    np.save(out_file, k_count_big)
    out_file = os.path.join(out_dir, 'all_classes_mul.jpg')
    visualize.saveMatAsImage(k_count_big, out_file)

    for class_idx in range(len(class_names)):
        k_count[class_idx] = normalize_k(k_count[class_idx])
        class_name = class_names[class_idx]
        out_file = os.path.join(out_dir, class_name + '.npy')
        np.save(out_file, k_count[class_idx])
        out_file = os.path.join(out_dir, class_name + '.jpg')
        visualize.saveMatAsImage(k_count[class_idx], out_file)

    visualize.writeHTMLForFolder(out_dir)
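# NOTE (illustrative only): set_k_mul and normalize_k_mul are used as defaults above but
# are not defined in this section. The hypothetical sketches below show one plausible
# interface, assuming `labels` is a [T x k] soft cluster-assignment matrix per video:
# co-occurrence is accumulated as an outer-product sum and later row-normalized. The
# original implementations may differ.
def set_k_mul_sketch(k_count, labels):
    # accumulate soft co-occurrence between the k clusters over all time steps
    return k_count + np.dot(labels.T, labels)

def normalize_k_mul_sketch(k_count):
    # scale each row to sum to 1, guarding against empty rows
    row_sums = np.maximum(k_count.sum(axis=1, keepdims=True), 1e-8)
    return k_count / row_sums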
def plot_all_necessaries(just_vid_name, out_dir_cooc_viz, out_dir_fg, out_dir_bg, gt_arr_row, gt_arr_col, arr_cooc):
    # just_vid_name = vid_name[:vid_name.rindex('.')]
    out_file_cooc = os.path.join(out_dir_cooc_viz, just_vid_name + '.jpg')
    out_file_fg = os.path.join(out_dir_fg, just_vid_name + '.jpg')
    out_file_bg = os.path.join(out_dir_bg, just_vid_name + '.jpg')

    # eye = -2*np.eye(arr_cooc.shape[0])
    # arr_cooc_h = arr_cooc+eye

    # slice co-occurrence values by foreground/background rows and columns
    fg_all = arr_cooc[gt_arr_row > 0, :]
    fg_fg = fg_all[:, gt_arr_col > 0]
    bg_all = arr_cooc[gt_arr_row == 0, :]
    bg_bg = bg_all[:, gt_arr_col == 0]

    num_bins = np.arange(0, 1.1, .1)
    all_vals = [val.flatten() for val in [fg_all, fg_fg, bg_all, bg_bg]]
    legend_entries = ['FG All', 'FG FG', 'BG All', 'BG BG']
    xlabel = 'Cooc Value'
    ylabel = 'Frequency'
    xtick_labels = ['%.1f' % val for val in num_bins]

    title = 'Foreground Hist for ' + just_vid_name
    visualize.plotMultiHist(out_file_fg, vals=all_vals[:2], num_bins=[num_bins, num_bins],
                            legend_entries=legend_entries[:2], title=title, xlabel=xlabel,
                            ylabel=ylabel, xticks=xtick_labels, density=True, align='mid')

    title = 'Background Hist for ' + just_vid_name
    visualize.plotMultiHist(out_file_bg, vals=all_vals[2:], num_bins=[num_bins, num_bins],
                            legend_entries=legend_entries[2:], title=title, xlabel=xlabel,
                            ylabel=ylabel, xticks=xtick_labels, density=True, align='mid')

    title = 'Mat ' + just_vid_name
    arr_cooc_mat = arr_cooc
    arr_cooc_mat[arr_cooc_mat < 0] = 0
    visualize.saveMatAsImage(arr_cooc_mat, out_file_cooc, title=title)
    print out_file_cooc
def save_graphs_to_look_at(model_file, graph_nums):
    out_dir_meta = model_file[:model_file.rindex('.')]
    out_dir_meta_meta = out_dir_meta + '_graph_etc'
    out_dir_viz = out_dir_meta_meta + '_viz'
    util.mkdir(out_dir_viz)

    for graph_num in graph_nums:
        out_dir_meta = out_dir_meta_meta + '_' + str(graph_num)
        assert os.path.exists(out_dir_meta)
        vid_files = glob.glob(os.path.join(out_dir_meta, '*test*.npz'))

        for vid_file in vid_files:
            npz_data = np.load(vid_file)
            vid_file = os.path.split(vid_file)[1]
            affinity = npz_data['affinity']
            gt_vecs = npz_data['gt_vecs']
            gt_classes = npz_data['gt_classes']
            x_all = npz_data['x_all']

            plotter = []
            legend_entries = []
            for gt_idx, gt_class in enumerate(gt_classes):
                gt_vec = gt_vecs[gt_idx]
                val_rel = x_all[0, :, gt_class]
                gt_vec = gt_vec / np.max(gt_vec)
                gt_vec = gt_vec * np.max(val_rel)  # (gt_idx+1)
                x_axis = range(gt_vec.size)
                plotter.append((x_axis, gt_vec))
                plotter.append((x_axis, val_rel))
                legend_entries.append(class_names[gt_class])
                legend_entries.append(class_names[gt_class] + ' pred')

            out_file = os.path.join(out_dir_viz, vid_file[:vid_file.rindex('.')] + '_gt.jpg')
            visualize.plotSimple(plotter, out_file=out_file, xlabel='time', ylabel='',
                                 legend_entries=legend_entries, outside=True)

            out_file = os.path.join(out_dir_viz,
                                    vid_file[:vid_file.rindex('.')] + '_' + str(graph_num) + '.jpg')
            visualize.saveMatAsImage(affinity, out_file)

    visualize.writeHTMLForFolder(out_dir_viz)
def save_neg_cooc_graphs(out_dir):
    all_file = os.path.join(out_dir, 'all_classes_mul.npy')
    all_cooc = np.load(all_file)

    for class_name in class_names:
        in_file = os.path.join(out_dir, class_name + '.npy')
        curr_cooc = np.load(in_file)

        out_cooc = curr_cooc - all_cooc
        out_cooc = out_cooc + np.eye(out_cooc.shape[0])

        out_file = os.path.join(out_dir, class_name + 'neg.jpg')
        visualize.saveMatAsImage(out_cooc, out_file)

        # print 'curr_cooc',curr_cooc.shape,np.min(curr_cooc),np.max(curr_cooc)
        # print 'out_cooc',out_cooc.shape,np.min(out_cooc),np.max(out_cooc)
        # print 'all_cooc',all_cooc.shape,np.min(all_cooc),np.max(all_cooc)
        # print out_file

        out_file = os.path.join(out_dir, class_name + 'neg.npy')
        np.save(out_file, out_cooc)

    visualize.writeHTMLForFolder(out_dir)
def save_neg_exp_cooc_graphs(out_dir):
    for class_name in class_names:
        in_file = os.path.join(out_dir, class_name + 'neg.npy')
        curr_cooc = np.load(in_file)
        print np.min(curr_cooc), np.max(curr_cooc)

        out_cooc = np.exp(curr_cooc - 1)
        print np.min(out_cooc), np.max(out_cooc)

        out_file = os.path.join(out_dir, class_name + 'negexp.jpg')
        visualize.saveMatAsImage(out_cooc, out_file)

        # print out_file
        # print 'curr_cooc',curr_cooc.shape,np.min(curr_cooc),np.max(curr_cooc)
        # print 'out_cooc',out_cooc.shape,np.min(out_cooc),np.max(out_cooc)
        # print 'all_cooc',all_cooc.shape,np.min(all_cooc),np.max(all_cooc)

        out_file = os.path.join(out_dir, class_name + 'negexp.npy')
        print out_file
        np.save(out_file, out_cooc)
        # raw_input()

    visualize.writeHTMLForFolder(out_dir)
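# NOTE (illustrative only): the three co-occurrence routines above are meant to be chained
# on the same out_dir -- getting_edge_weights writes <class>.npy and all_classes_mul.npy,
# save_neg_cooc_graphs reads them to write <class>neg.npy, and save_neg_exp_cooc_graphs
# reads those to write <class>negexp.npy. The paths and k below are placeholders, not
# values from the original code.
def run_cooc_pipeline_sketch():
    file_curr = '../data/train_test_files/train.txt'  # hypothetical train list
    out_dir_labels = '../scratch/labels'               # hypothetical label dir
    out_dir = '../scratch/cooc_graphs'                 # hypothetical output dir
    k = 64                                             # hypothetical number of clusters
    util.mkdir(out_dir)
    getting_edge_weights(file_curr, out_dir_labels, out_dir, k)
    save_neg_cooc_graphs(out_dir)
    save_neg_exp_cooc_graphs(out_dir)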
def save_sim_viz(vid_name, out_shape_curr, sim_mat, class_idx, out_dir, dataset='ucf'):
    gt_vals, det_times = get_gt_vector(vid_name, out_shape_curr, class_idx, dataset=dataset)

    if dataset.startswith('activitynet'):
        class_names = globals.class_names_activitynet
    else:
        class_names = globals.class_names

    out_dir_curr = os.path.join(out_dir, class_names[class_idx])
    util.mkdir(out_dir_curr)

    pos_rows = sim_mat[gt_vals > 0, :]
    pos_rows = np.mean(pos_rows, axis=0)
    neg_rows = sim_mat[gt_vals < 1, :]
    neg_rows = np.mean(neg_rows, axis=0)

    # for idx_pos_row, pos_row in enumerate(pos_rows):
    max_val = max(np.max(pos_rows), np.max(neg_rows))
    gt_vals_curr = gt_vals * max_val
    arr_plot = [(det_times, curr_arr) for curr_arr in [gt_vals_curr, pos_rows, neg_rows]]
    legend_entries = ['gt', 'pos', 'neg']
    # idx_pos_row = str(idx_pos_row)
    out_file_curr = os.path.join(out_dir_curr, vid_name + '.jpg')
    title = vid_name  # +' '+idx_pos_row

    # visualize.plotSimple(arr_plot, out_file=out_file_curr, title=title, xlabel='time',
    #                      ylabel='max sim', legend_entries=legend_entries)
    # print out_file_curr
    # print
    np.save(out_file_curr.replace('.jpg', '.npy'), sim_mat)
    visualize.saveMatAsImage(sim_mat, out_file_curr, title=title)
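# NOTE (illustrative only): get_gt_vector is used above and in check_graph below but is not
# defined in this section. From its call sites it returns a length-out_shape binary
# ground-truth vector for one class plus the time stamp of each detection bin. The sketch
# assumes a hypothetical `annos` dict mapping vid_name to (start_sec, end_sec, class_idx)
# tuples and a known video duration; the original implementation may differ.
def get_gt_vector_sketch(vid_name, out_shape, class_idx, annos, duration, dataset='ucf'):
    gt_vals = np.zeros(out_shape)
    det_times = np.linspace(0, duration, out_shape, endpoint=False)  # time of each bin
    for start_sec, end_sec, anno_class in annos[vid_name]:
        if anno_class == class_idx:
            gt_vals[(det_times >= start_sec) & (det_times <= end_sec)] = 1
    return gt_vals, det_times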
def get_distance_from_perfect(model_file, graph_num):
    out_dir_meta = model_file[:model_file.rindex('.')]
    out_dir_meta_meta = out_dir_meta + '_graph_etc'
    out_dir_meta = out_dir_meta_meta + '_' + str(graph_num)
    out_dir_viz = out_dir_meta + '_dist_perfectG'
    print out_dir_viz
    util.mkdir(out_dir_viz)
    assert os.path.exists(out_dir_meta)

    vid_files = glob.glob(os.path.join(out_dir_meta, '*validation*.npz'))
    class_collations = [[] for idx in range(len(class_names))]
    class_collations_pos = [[] for idx in range(len(class_names))]
    viz = True
    threshes = np.arange(0.1, 1.1, 0.1)
    print threshes

    for vid_file in vid_files:
        print vid_file
        npz_data = np.load(vid_file)
        vid_file = os.path.split(vid_file)[1]
        affinity = npz_data['affinity']
        gt_vecs = npz_data['gt_vecs']
        gt_classes = npz_data['gt_classes']

        if viz:
            out_file = os.path.join(out_dir_viz,
                                    vid_file[:vid_file.rindex('.')] + '_' + str(graph_num) + '.jpg')
            visualize.saveMatAsImage(affinity, out_file)

            plotter = []
            legend_entries = []
            for gt_idx, gt_class in enumerate(gt_classes):
                gt_vec = gt_vecs[gt_idx]
                gt_vec = gt_vec / np.max(gt_vec)
                gt_vec = gt_vec * (gt_idx + 1)
                x_axis = range(gt_vec.size)
                plotter.append((x_axis, gt_vec))
                legend_entries.append(class_names[gt_class])

            out_file = os.path.join(out_dir_viz, vid_file[:vid_file.rindex('.')] + '_gt.jpg')
            visualize.plotSimple(plotter, out_file=out_file, xlabel='time', ylabel='',
                                 legend_entries=legend_entries)

        for idx_gt, gt_vec in enumerate(gt_vecs):
            gt_class = gt_classes[idx_gt]
            class_name = class_names[gt_class]

            # perfect graph: 1 where both time steps belong to the GT segment, 0 elsewhere
            gt_vec = gt_vec[:, np.newaxis]
            perfectG = np.dot(gt_vec, gt_vec.T)
            aff = np.array(affinity)
            aff_just_pos = aff * perfectG

            diff = get_l2_diff(aff, perfectG, threshes)
            diff_pos = get_l2_diff(aff_just_pos, perfectG, threshes)
            class_collations[gt_class].append(diff)
            class_collations_pos[gt_class].append(diff_pos)

            plotter = [(threshes, diff), (threshes, diff_pos)]
            legend_entries = ['All', 'Pos']
            out_file = os.path.join(out_dir_viz,
                                    vid_file[:vid_file.rindex('.')] + '_' + class_name + '_diff.jpg')
            visualize.plotSimple(plotter, out_file=out_file, xlabel='Thresh', ylabel='Diff',
                                 legend_entries=legend_entries)

            if viz:
                out_file = os.path.join(out_dir_viz,
                                        vid_file[:vid_file.rindex('.')] + '_' + class_name + '_perfectG.jpg')
                visualize.saveMatAsImage(perfectG, out_file)

        visualize.writeHTMLForFolder(out_dir_viz)

    for idx_class in range(len(class_names)):
        class_name = class_names[idx_class]
        cc = np.array(class_collations[idx_class])
        ccp = np.array(class_collations_pos[idx_class])
        cc = np.mean(cc, axis=0)
        ccp = np.mean(ccp, axis=0)

        plotter = [(threshes, cc), (threshes, ccp)]
        legend_entries = ['All', 'Pos']
        out_file = os.path.join(out_dir_viz, 'average_' + class_name + '_diff.jpg')
        visualize.plotSimple(plotter, out_file=out_file, title=class_name, xlabel='Thresh',
                             ylabel='Diff', legend_entries=legend_entries)

    visualize.writeHTMLForFolder(out_dir_viz)
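# NOTE (illustrative only): get_l2_diff is called above but not defined in this section.
# The sketch assumes it binarizes the affinity matrix at each threshold and returns the
# Frobenius (L2) distance to the perfect graph per threshold; the original implementation
# may normalize differently.
def get_l2_diff_sketch(aff, perfectG, threshes):
    diffs = []
    for thresh in threshes:
        aff_bin = (aff >= thresh).astype(float)   # threshold the affinity graph
        diffs.append(np.linalg.norm(aff_bin - perfectG))
    return np.array(diffs)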
def check_graph():
    # model_file = '../experiments/graph_multi_video_pretrained_F_flexible_alt_temp_train_normalize_True_True_non_lin_HT_sparsify_True_num_switch_5_5_graph_size_32_focus_1_deno_8_n_classes_20_in_out_2048_64_2048_64_method_cos_pretrained_ucf_ucf/all_classes_False_just_primary_False_limit_500_cw_True_MultiCrossEntropy_500_step_500_0.1_0.0001_0.001_0.001_FIXED/model_199.pt'
    model_file = '../experiments/graph_multi_video_pretrained_F_flexible_alt_train_temp_normalize_True_True_non_lin_HT_sparsify_True_num_switch_5_5_graph_size_32_focus_1_deno_8_n_classes_20_in_out_2048_64_2048_64_method_cos_pretrained_ucf_ucf/all_classes_False_just_primary_False_limit_500_cw_True_MultiCrossEntropy_500_step_500_0.1_0.0001_0.001_0.001_FIXED/model_299.pt'
    model_file = '../experiments/graph_multi_video_pretrained_F_flexible_alt_train_temp_normalize_True_True_non_lin_HT_sparsify_True_num_switch_5_5_graph_size_2_focus_1_deno_8_n_classes_20_in_out_2048_64_2048_64_method_cos_pretrained_ucf_ucf/all_classes_False_just_primary_False_limit_500_cw_True_MultiCrossEntropy_500_step_500_0.1_0.0001_0.001_0.001_ABS/model_499.pt'
    model_file = '../experiments/graph_multi_video_multi_F_joint_train_normalize_True_True_non_lin_HT_sparsify_True_graph_size_2_deno_8_n_classes_20_in_out_2048_64_feat_dim_2048_64_method_cos_ucf/all_classes_False_just_primary_False_limit_500_cw_True_MultiCrossEntropyMultiBranch_300_step_300_0.1_0.001_0.001_lw_0.5_0.5_ABS/model_299.pt'

    model = torch.load(model_file).cuda()
    model.eval()

    train_data, test_train_data, test_data, n_classes, trim_preds = emb.get_data(
        'ucf', 500, False, just_primary=False, gt_vec=False)

    # test_data = train_data
    # test_bool = False
    # test_data.feature_limit = None
    batch_size = 1
    branch_to_test = 1
    test_bool = True

    out_dir_meta = model_file[:model_file.rindex('.')]
    out_dir_meta = out_dir_meta + '_visualizing_' + str(branch_to_test)
    util.mkdir(out_dir_meta)
    print out_dir_meta

    anno_file = test_data.anno_file
    vid_names, annos = readTrainTestFile(anno_file)

    test_dataloader = torch.utils.data.DataLoader(
        test_data, batch_size=batch_size, collate_fn=test_data.collate_fn,
        shuffle=False, num_workers=1)

    import torch.nn.functional as F
    preds = []
    labels = []

    for idx_data, data in enumerate(test_dataloader):
        gt_classes = np.where(annos[idx_data])[0]
        vid_name = os.path.split(vid_names[idx_data])[1]
        vid_name = vid_name[:vid_name.rindex('.')]
        out_dir_curr = os.path.join(out_dir_meta, vid_name)
        util.mkdir(out_dir_curr)

        label = data['label'].cpu().data.numpy()
        affinity = model.get_similarity(data['features'], sparsify=True)
        x_all, pmf = model(data['features'], branch_to_test=branch_to_test)
        assert len(pmf) == 1

        x_all = torch.cat([x_all_curr.unsqueeze(0) for x_all_curr in x_all], 0)
        x_all = F.softmax(x_all, dim=1)
        x_all = x_all.data.cpu().numpy()
        affinity = affinity.data.cpu().numpy()

        for gt_class in gt_classes:
            affinity_copy = np.array(affinity)
            x_rel = x_all[:, gt_class]
            thresh = np.max(x_rel) - (np.max(x_rel) - np.min(x_rel)) * 0.5

            gt_vec, _ = get_gt_vector(vid_name, x_rel.shape[0], gt_class, test=test_bool)
            if np.sum(gt_vec) == 0:
                print 'we got an anno problem', vid_name
                continue

            # mark GT segment boundaries in the affinity matrix for visualization
            bin_keep = gt_vec.astype(int)
            bin_keep_rot = np.roll(bin_keep, 1)
            bin_keep_rot[0] = 0
            diff = bin_keep - bin_keep_rot
            idx_start_all = list(np.where(diff == 1)[0])
            idx_end_all = list(np.where(diff == -1)[0])
            idx_borders = np.array(idx_start_all + idx_end_all)
            affinity_copy[:, idx_borders] = np.max(affinity_copy)
            affinity_copy[idx_borders, :] = np.max(affinity_copy)

            gt_vec = gt_vec * np.max(x_rel)
            x_axis = range(x_rel.size)
            thresh = thresh * np.ones(x_rel.shape)

            out_file_curr = os.path.join(out_dir_curr, 'det_confs_' + class_names[gt_class] + '.jpg')
            visualize.plotSimple([(x_axis, x_rel), (x_axis, gt_vec), (x_axis, thresh)],
                                 out_file=out_file_curr, title=class_names[gt_class],
                                 xlabel='time', ylabel='det conf',
                                 legend_entries=['Det', 'GT', 'Thresh'])

            out_file_mat = os.path.join(out_dir_curr, 'mat_' + class_names[gt_class] + '.jpg')
            visualize.saveMatAsImage(affinity_copy, out_file_mat)

        preds.append(F.softmax(pmf[0]).data.cpu().numpy())
        labels.append(label)
        visualize.writeHTMLForFolder(out_dir_curr)
        print out_dir_curr
        raw_input()

    labels = np.concatenate(labels, axis=0)
    preds = np.concatenate(preds, axis=0)
    labels[labels > 0] = 1
    # average precision over all videos and classes
    accuracy = sklearn.metrics.average_precision_score(labels, preds)
    print accuracy