def plot_detections_over_time(data_dir, out_dir, fps, smooth=False):

    frame_files = glob.glob(os.path.join(data_dir, '*.jpg'))
    frame_files.sort()
    result_files = [
        file.replace(data_dir, out_dir).replace('.jpg', '.txt')
        for file in frame_files
    ]

    det_confs = [[], []]
    time = []
    for frame, result in zip(frame_files, result_files):
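        # parse the frame number out of the filename and convert it to
        # video time in minutes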
        num = fps * (int(frame[frame.rindex('_') + 1:frame.rindex('.')]) -
                     1) + fps / 2.
        time.append(num / 60.)
        # time.append(int(frame[frame.rindex('_')+1:frame.rindex('.')]))
        dets = util.readLinesFromFile(result)
        if len(dets) == 0:
            for idx in range(2):
                det_confs[idx].append(0)
        else:
            class_conf = []
            for det in dets:
                det = det.split(' ')
                det = [int(det[0]), float(det[1])]
                class_conf.append(det)
            class_conf = np.array(class_conf)
            for class_curr in range(2):
                rel_dets = class_conf[class_conf[:, 0] == class_curr, :]

                if rel_dets.size == 0:
                    max_conf = 0
                else:
                    max_conf = np.max(rel_dets[:, 1])
                det_confs[class_curr].append(max_conf)

    if smooth:
        smooth_window = int(round(60 / float(fps)))
        # print smooth_window

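        # smooth each confidence curve with a moving-average (box) filter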
        for idx, det in enumerate(det_confs):
            box = np.ones((smooth_window, )) / float(smooth_window)
            det_confs[idx] = np.convolve(np.array(det), box, mode='same')

    labels = ['side', 'front']
    xlabel = 'Video Time (min)'
    ylabel = 'Detection Confidence'
    title = 'Face Detections Over Time'
    out_file = os.path.join(out_dir, 'detections_over_time.jpg')
    visualize.plotSimple([(time, det_confs[0]), (time, det_confs[1])],
                         out_file=out_file,
                         xlabel=xlabel,
                         ylabel=ylabel,
                         title=title,
                         legend_entries=labels)
    print 'DETECTION GRAPH:', out_file
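
All of these examples call visualize.plotSimple, whose source is not shown on this page. Below is a minimal sketch of what such a helper might look like, assuming a matplotlib backend; the keyword names mirror the calls in these examples, but the body is an illustration, not the library's actual code.

import matplotlib
matplotlib.use('Agg')  # render to files without a display
import matplotlib.pyplot as plt


def plot_simple_sketch(xAndYs,
                       out_file,
                       title='',
                       xlabel='',
                       ylabel='',
                       legend_entries=None,
                       xticks=None,
                       outside=False,
                       noline=False):
    # xAndYs is a list of (x, y) pairs, one per curve, as in the calls above
    fig, ax = plt.subplots()
    for x, y in xAndYs:
        if noline:
            ax.plot(x, y, linestyle='None', marker='o')  # points only
        else:
            ax.plot(x, y)
    if xticks is not None:
        # passed as [positions, labels] in these examples
        ax.set_xticks(xticks[0])
        ax.set_xticklabels(xticks[1])
    ax.set_title(title)
    ax.set_xlabel(xlabel)
    ax.set_ylabel(ylabel)
    if legend_entries is not None:
        if outside:
            # the 'outside' flag presumably moves the legend off the axes
            ax.legend(legend_entries, bbox_to_anchor=(1.04, 1.), loc='upper left')
        else:
            ax.legend(legend_entries)
    fig.savefig(out_file, bbox_inches='tight')
    plt.close(fig)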
Example #2
def plot_graph_size():
    graph_size = [1, 2, 3, 4, 8, 16, 32]
    xticks = [str(val) for val in graph_size]
    overlap_str = ['%.1f' % val for val in np.arange(0.1, 0.6, 0.1)]
    graph_vals = [[63.7, 57.0, 47.7, 36.9,
                   26.2], [63.2, 56.7, 47.9, 37.1, 26.6],
                  [64.0, 58.2, 49.8, 39.2,
                   28.2], [63.7, 57.3, 48.2, 36.7, 25.5],
                  [62.7, 56.5, 47.0, 36.0,
                   23.9], [62.7, 55.5, 46.6, 35.3, 24.0],
                  [61.1, 54.1, 44.6, 32.8, 21.9]]
    graph_vals = np.array(graph_vals)
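    # transpose so each row holds one overlap threshold across all graph sizes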
    graph_vals = graph_vals.T

    graph_size = [1, 2, 4, 8, 16, 32]
    xticks = [str(val) for val in graph_size]
    overlap_str = ['%.1f' % val for val in np.arange(0.1, 0.6, 0.1)]

    out_file = '../scratch/qualitative_figs_wacv/graph_size_anet.jpg'
    util.mkdir('../scratch/qualitative_figs_wacv')
    ylabel = 'Average Precision'
    xlabel = 'Graph Video Size'
    xAndYs = []
    legend_entries = []

    # graph_val = [63.65, 62.96, 63.16, 62.9, 61.58, 60.13]
    # title = 'THUMOS'

    graph_val = [29.44, 30.05, 30.53, 29.25, 29.85, 28.56]
    overlap_str = ['%.1f' % val for val in np.arange(0.5, 1.0, 0.2)]
    title = 'ActivityNet'

    # for idx_graph_val,graph_val in enumerate(graph_vals):
    #     if idx_graph_val<4:
    #         continue
    x = range(len(graph_val))
    xAndYs.append((x, graph_val))
    legend_entries.append(overlap_str[0] + ' Overlap')
    visualize.plotSimple(xAndYs,
                         out_file=out_file,
                         title=title,
                         xlabel=xlabel,
                         ylabel=ylabel,
                         legend_entries=legend_entries,
                         xticks=[x, xticks])
Example #3
def get_models_accuracy():
    out_dir_meta = '../experiments_dropout'
    exp_name = 'bp4d_norecon'
    wdecay = 0
    lr = [0.001, 0.001]
    route_iter = 3
    folds_all = [0, 1]
    model_name = 'khorrami_capsule_7_3_gray'
    epoch_stuff = [15, 15]
    gpu_id = 0
    # folds =
    # dropout =
    commands_all = []

    params_arr = [(0, [2], 0), (0, [2], 1)]
    # (0,[0,1],0),(0.5,[0,1],1),(0.5,[2],2)]

    xAndYs = []
    legend_strs = []
    for dropout in [0., 0.5]:
        for aug_more in [True, False]:
            val_arr_all = []
            for fold in range(3):
                params = dict(wdecay=wdecay,
                              lr=lr,
                              route_iter=route_iter,
                              model_name=model_name,
                              epoch_stuff=epoch_stuff,
                              gpu_id=gpu_id,
                              fold=fold,
                              dropout=dropout,
                              aug_more=aug_more)
                out_dir_train, pre_pend, post_pend = get_out_dir_train_name(
                    **params)
                log_file = os.path.join(out_dir_train, 'log.txt')
                assert os.path.exists(log_file)
                val_arr = get_log_val_arr(log_file)

                val_arr_all.append(val_arr)
            val_arr_all = np.array(val_arr_all)
            val_arr_all = np.mean(val_arr_all, 0)
            xAndYs.append((range(len(val_arr_all)), val_arr_all))
            legend_strs.append(' '.join(
                [str(val) for val in [dropout, aug_more]]))

    out_file = os.path.join(out_dir_meta, exp_name + '.jpg')
    visualize.plotSimple(xAndYs,
                         out_file,
                         title='Dropout',
                         xlabel='Epoch',
                         ylabel='Val Accuracy',
                         legend_entries=legend_strs,
                         outside=True)
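
get_log_val_arr is not shown here; it presumably pulls the per-epoch validation accuracies out of log.txt. A hypothetical sketch, assuming log lines of the form 'val accuracy: <float>' (the actual log format may differ):

import re


def get_log_val_arr_sketch(log_file):
    # collect every float that follows 'val accuracy' in the log, one per epoch
    vals = []
    with open(log_file) as f:
        for line in f:
            match = re.search(r'val accuracy:\s*([0-9.]+)', line)
            if match is not None:
                vals.append(float(match.group(1)))
    return vals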
Example #4
def save_graphs_to_look_at(model_file, graph_nums):
    out_dir_meta = model_file[:model_file.rindex('.')]
    out_dir_meta_meta = out_dir_meta + '_graph_etc'
    out_dir_viz = out_dir_meta_meta + '_viz'
    util.mkdir(out_dir_viz)
    for graph_num in graph_nums:
        out_dir_meta = out_dir_meta_meta + '_' + str(graph_num)
        assert os.path.exists(out_dir_meta)
        vid_files = glob.glob(os.path.join(out_dir_meta, '*test*.npz'))

        for vid_file in vid_files:

            npz_data = np.load(vid_file)
            vid_file = os.path.split(vid_file)[1]
            affinity = npz_data['affinity']

            gt_vecs = npz_data['gt_vecs']
            gt_classes = npz_data['gt_classes']
            x_all = npz_data['x_all']

            plotter = []
            legend_entries = []
            for gt_idx, gt_class in enumerate(gt_classes):
                gt_vec = gt_vecs[gt_idx]
                val_rel = x_all[0, :, gt_class]
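                # rescale the binary ground-truth vector to the prediction's
                # range so both curves share one axis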
                gt_vec = gt_vec / np.max(gt_vec)
                gt_vec = gt_vec * np.max(val_rel)
                # (gt_idx+1)
                x_axis = range(gt_vec.size)
                plotter.append((x_axis, gt_vec))
                plotter.append((x_axis, val_rel))
                legend_entries.append(class_names[gt_class])
                legend_entries.append(class_names[gt_class] + ' pred')

            out_file = os.path.join(
                out_dir_viz, vid_file[:vid_file.rindex('.')] + '_gt.jpg')
            visualize.plotSimple(plotter,
                                 out_file=out_file,
                                 xlabel='time',
                                 ylabel='',
                                 legend_entries=legend_entries,
                                 outside=True)

            out_file = os.path.join(
                out_dir_viz, vid_file[:vid_file.rindex('.')] + '_' +
                str(graph_num) + '.jpg')
            visualize.saveMatAsImage(affinity, out_file)

            visualize.writeHTMLForFolder(out_dir_viz)
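
visualize.saveMatAsImage is also not shown. A plausible stand-in, assuming matplotlib, that renders a 2D matrix (here, the frame-to-frame affinity matrix) as a heatmap:

import matplotlib.pyplot as plt


def save_mat_as_image_sketch(mat, out_file):
    # draw the matrix as an image with a colorbar and save it
    fig, ax = plt.subplots()
    im = ax.imshow(mat)
    fig.colorbar(im, ax=ax)
    fig.savefig(out_file, bbox_inches='tight')
    plt.close(fig)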
Example #5
def save_sim_viz_mean(vid_name, out_shape_curr, sim_mat, class_idx, out_dir):
    gt_vals, det_times = get_gt_vector(vid_name, out_shape_curr, class_idx)

    out_dir_curr = os.path.join(out_dir, class_names[class_idx])
    util.mkdir(out_dir_curr)

    idx_pos = gt_vals > 0
    idx_neg = gt_vals < 1
    sim_pos_all = []
    sim_neg_all = []
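    # for each positive (in-action) step, collect its similarity to all
    # positive and all negative steps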
    for idx_idx_curr, idx_curr in enumerate(np.where(idx_pos)[0]):
        sim_pos = sim_mat[idx_curr, idx_pos]
        sim_neg = sim_mat[idx_curr, idx_neg]
        sim_pos_all.append(sim_pos[np.newaxis, :])
        sim_neg_all.append(sim_neg[np.newaxis, :])

    sim_pos_all = np.concatenate(sim_pos_all, axis=0)
    sim_neg_all = np.concatenate(sim_neg_all, axis=0)

    sim_pos_mean = np.mean(sim_pos_all, axis=0)
    sim_neg_mean = np.mean(sim_neg_all, axis=0)

    pos_vals = np.zeros(gt_vals.shape)
    pos_vals[gt_vals > 0] = sim_pos_mean
    neg_vals = np.zeros(gt_vals.shape)
    neg_vals[gt_vals < 1] = sim_neg_mean

    max_val = max(np.max(pos_vals), np.max(neg_vals))
    gt_vals = gt_vals * max_val

    arr_plot = [(det_times, curr_arr)
                for curr_arr in [gt_vals, pos_vals, neg_vals]]
    legend_entries = ['gt', 'pos', 'neg']

    out_file_curr = os.path.join(out_dir_curr, vid_name + '.jpg')
    title = vid_name

    visualize.plotSimple(arr_plot,
                         out_file=out_file_curr,
                         title=title,
                         xlabel='time',
                         ylabel='max sim',
                         legend_entries=legend_entries)
    print out_file_curr
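
get_gt_vector is assumed to return a binary per-time-step ground-truth vector plus the matching time stamps. A self-contained sketch of the likely logic; it takes the (start, end) second intervals directly, whereas the real helper looks them up by vid_name and class_idx, and the snapping mirrors the rasterization in Example #9 below.

import numpy as np


def get_gt_vector_sketch(gt_time_intervals, out_shape_curr, fps_stuff=16. / 25.):
    # one time stamp per feature step; 1 wherever a ground-truth interval falls
    det_times = np.arange(out_shape_curr + 1) * fps_stuff
    gt_vals = np.zeros(det_times.shape)
    for t_start, t_end in gt_time_intervals:
        idx_start = np.argmin(np.abs(det_times - t_start))
        idx_end = np.argmin(np.abs(det_times - t_end))
        gt_vals[idx_start:idx_end] = 1
    return gt_vals, det_times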
Example #6
def graph_overfitting():

    log_file_us = '../experiments/graph_multi_video_with_L1_retF/graph_multi_video_with_L1_retF_aft_nonlin_RL_L2_non_lin_None_sparsify_0.5_graph_size_2_sigmoid_False_graph_sum_True_deno_8_n_classes_20_in_out_2048_1024_feat_dim_2048_1024_feat_ret_True_method_cos_ucf/all_classes_False_just_primary_False_limit_None_cw_True_MultiCrossEntropyMultiBranchWithL1_CASL_500_step_250_0.1_0.001_0.001_0.001_lw_1.00_1.00_0.50__noLimit/log_det.txt'
    log_file_them = '../experiments/graph_multi_video_with_L1_retW/graph_multi_video_with_L1_retW_aft_nonlin_RL_L2_non_lin_None_sparsify_0.5_graph_size_2_sigmoid_False_graph_sum_True_deno_8_n_classes_20_in_out_2048_1024_feat_dim_2048_1024_feat_ret_True_method_cos_ucf/all_classes_False_just_primary_False_limit_None_cw_True_MultiCrossEntropyMultiBranchWithL1_CASL_250_step_250_0.1_0.001_0.001_0.001_lw_1.00_1.00_0.50__caslexp/log_det.txt'

    log_file_us = '../experiments/graph_multi_video_with_L1_retF_tanh/graph_multi_video_with_L1_retF_tanh_aft_nonlin_RL_L2_non_lin_None_sparsify_percent_0.5_graph_size_1_sigmoid_True_graph_sum_True_deno_8_n_classes_20_in_out_2048_1024_feat_dim_2048_1024_feat_ret_True_method_cos_ucf/all_classes_False_just_primary_False_limit_None_cw_False_MultiCrossEntropyMultiBranchWithL1_CASL_250_step_250_0.1_0.001_0.001_0.001_lw_1.00_1.00_1.00_forplot_0/log_det.txt'
    log_file_them = '../experiments/graph_multi_video_with_L1_retW_new/graph_multi_video_with_L1_retW_new_aft_nonlin_RL_L2_non_lin_None_sparsify_percent_0.5_graph_size_1_sigmoid_True_graph_sum_True_deno_8_n_classes_20_in_out_2048_1024_feat_dim_2048_1024_feat_ret_True_method_cos_ucf/all_classes_False_just_primary_False_limit_None_cw_False_MultiCrossEntropyMultiBranchWithL1_CASL_250_step_250_0.1_0.001_0.001_0.001_lw_1.00_1.00_1.00_actuallytanh_0/log_det.txt'

    files = [log_file_us, log_file_them]
    x_vals = range(0, 251, 10)
    xAndYs = []
    for file_curr in files:
        lines = util.readLinesFromFile(file_curr)
        lines = lines[:26]
        print lines[0]
        det_vals = []
        # for line in lines:
        #     line = [val for val in line.split(' ') if val is not '']
        #     print line
        #     det_vals.append(float(line[-1]))
        #     raw_input()
        det_vals = [float(line.split('\t')[-1]) for line in lines]
        # det_vals = det_vals[::5]
        xAndYs.append((x_vals, det_vals))

    out_file = '../scratch/qualitative_figs_wacv/graph_overfitting.jpg'
    util.mkdir('../scratch/qualitative_figs_wacv')
    legend_entries = ['Ours-MCASL', 'CASL-Graph']
    xlabel = 'Training Epoch'
    ylabel = 'Detection Accuracy'
    title = 'Detection Accuracy at 0.5 Overlap'
    visualize.plotSimple(xAndYs,
                         out_file=out_file,
                         xlabel=xlabel,
                         ylabel=ylabel,
                         legend_entries=legend_entries,
                         title=title)
Example #7
def viz_overlap_multi(out_dir_meta,
                      det_conf_all_dict,
                      out_shapes,
                      fps_stuff,
                      title=None,
                      dataset='ucf'):
    # print 'HELLO'
    # fps_stuff = 1./10.
    # activitynet = False
    print dataset
    if dataset == 'activitynet':
        class_names = globals.class_names_activitynet
        gt_vid_names_all, gt_class_names, gt_time_intervals_all = load_activitynet_gt(
            False)
    elif dataset == 'activitynet_select':
        class_names = globals.class_names_activitynet_select
        gt_vid_names_all, gt_class_names, gt_time_intervals_all = load_activitynet_gt(
            False, True)
    elif dataset == 'multithumos':
        class_names = globals.class_names_multithumos
        gt_vid_names_all, gt_class_names, gt_time_intervals_all = load_multithumos_gt(
            False)
    else:
        class_names = [
            'BaseballPitch', 'BasketballDunk', 'Billiards', 'CleanAndJerk',
            'CliffDiving', 'CricketBowling', 'CricketShot', 'Diving',
            'FrisbeeCatch', 'GolfSwing', 'HammerThrow', 'HighJump',
            'JavelinThrow', 'LongJump', 'PoleVault', 'Shotput',
            'SoccerPenalty', 'TennisSwing', 'ThrowDiscus', 'VolleyballSpiking'
        ]
        gt_vid_names_all, gt_class_names, gt_time_intervals_all = load_ucf_gt(
            False)
        class_names.sort()

    aps = np.zeros((len(class_names) + 1, 5))
    overlap_thresh_all = np.arange(0.1, 0.6, 0.1)

    for idx_class_name, class_name in enumerate(class_names):
        # if idx_class_name<6:
        #     continue
        out_dir = os.path.join(out_dir_meta, class_name)
        util.mkdir(out_dir)

        # if not dataset.startswith('activitynet'):
        #     mat_file = os.path.join('../TH14evalkit','mat_files', class_name+'_test.mat')

        #     loaded = scipy.io.loadmat(mat_file)

        #     gt_vid_names_all = loaded['gtvideonames'][0]
        #     gt_class_names = loaded['gt_events_class'][0]

        #     gt_time_intervals = loaded['gt_time_intervals'][0]

        #     arr_meta = [gt_vid_names_all, gt_class_names]
        #     arr_out = []
        #     for arr_curr in arr_meta:
        #         arr_curr = [str(a[0]) for a in arr_curr]
        #         arr_out.append(arr_curr)

        #     [gt_vid_names_all, gt_class_names] = arr_out
        #     gt_time_intervals_all = np.array([a[0] for a in gt_time_intervals])

        gt_vid_names = list(
            np.unique(
                np.array(gt_vid_names_all)[np.array(gt_class_names) ==
                                           class_name]))

        for gt_vid_name in gt_vid_names:

            gt_time_intervals = gt_time_intervals_all[np.logical_and(
                np.array(gt_vid_names_all) == gt_vid_name,
                np.array(gt_class_names) == class_name)]
            if gt_vid_name not in list(out_shapes.keys()):
                continue

            out_shape_curr = out_shapes[gt_vid_name]
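            # one time stamp per feature step; fps_stuff converts a step
            # index to seconds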
            det_times = np.array(range(0, out_shape_curr + 1)) * fps_stuff
            gt_vals = np.zeros(det_times.shape)

            plot_arr = []
            legend_entries = []

            max_det_conf = None
            for k in det_conf_all_dict.keys():

                [
                    det_conf_curr, det_time_intervals_all,
                    det_events_class_all, det_vid_names
                ] = det_conf_all_dict[k]

                bin_keep = det_vid_names == gt_vid_name

                # print det_vid_names[0], gt_vid_names, np.sum(bin_keep)

                bin_keep = np.logical_and(
                    bin_keep, det_events_class_all == idx_class_name)
                if np.sum(bin_keep) == 0:
                    # print 'Continuing'
                    continue

                # print 'not Continuing'
                # det_conf_curr = det_conf_all_dict[k][0]
                det_time_intervals_merged = det_time_intervals_all[bin_keep, :]
                det_conf_curr = det_conf_curr[bin_keep]
                # print k, det_time_intervals_merged

                if max_det_conf is None:
                    max_det_conf = np.max(det_conf_curr)
                else:
                    max_det_conf = max(max_det_conf, np.max(det_conf_curr))

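                # rasterize each detection interval onto the time grid at
                # its confidence value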
                det_vals = np.zeros(det_times.shape)
                for idx_det_time_curr, det_time_curr in enumerate(
                        det_time_intervals_merged):
                    idx_start = np.argmin(np.abs(det_times - det_time_curr[0]))
                    idx_end = np.argmin(np.abs(det_times - det_time_curr[1]))
                    det_vals[idx_start:idx_end] = det_conf_curr[
                        idx_det_time_curr]

                legend_entries.append(k)
                plot_arr.append((det_times, det_vals))

            if max_det_conf is None:
                # no detection curve matched this video; plot GT at unit height
                max_det_conf = 1.
            for gt_time_curr in gt_time_intervals:
                idx_start = np.argmin(np.abs(det_times - gt_time_curr[0]))
                idx_end = np.argmin(np.abs(det_times - gt_time_curr[1]))
                gt_vals[idx_start:idx_end] = max_det_conf

            plot_arr.append((det_times, gt_vals))
            legend_entries.append('GT')

            out_file_curr = os.path.join(out_dir, gt_vid_name + '.jpg')

            if title is None:
                title = 'det conf over time'
            # print plot_arr
            out_file_first = out_file_curr[:out_file_curr.rindex('.')]
            # plot_arr_for_save = {}
            for idx_arr in range(len(plot_arr)):
                # plot_arr_for_save[legend_entries[idx_arr]]=np.array(list(plot_arr[idx_arr]))
                arr_curr = np.array(list(plot_arr[idx_arr]))
                np.save(
                    out_file_first + '_' + legend_entries[idx_arr] + '.npy',
                    arr_curr)

                # print legend_entries[idx_arr],plot_arr_for_save[legend_entries[idx_arr]].shape

            visualize.plotSimple(plot_arr,
                                 out_file=out_file_curr,
                                 title=title,
                                 xlabel='Time',
                                 ylabel='Detection Confidence',
                                 legend_entries=legend_entries)

        visualize.writeHTMLForFolder(out_dir)
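Example #8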
def main():

    gt_vec_dir = '../experiments/graph_multi_video_same_F_ens_dll_moredepth/graph_multi_video_same_F_ens_dll_moredepth_aft_nonlin_HT_L2_non_lin_HT_num_graphs_1_sparsify_0.5_graph_size_2_sigmoid_True_deno_0.5_n_classes_20_in_out_2048_256_feat_dim_2048_512_method_cos_zero_self_ucf/all_classes_False_just_primary_False_limit_None_cw_True_MultiCrossEntropy_300_step_300_0.1_0.001_0.001_0.001_ABS_bias_retry/model_299_graph_etc_0'

    dir_results = '../experiments/graph_multi_video_same_i3dF/graph_multi_video_same_i3dF_aft_nonlin_HT_l2_sparsify_0.5_non_lin_None_method_cos_zero_self_deno_8_n_classes_20_in_out_2048_2_graph_size_2_ucf/all_classes_False_just_primary_False_limit_None_cw_True_MultiCrossEntropy_100_step_100_0.1_0.001_0.01_ABS_bias_wb/results_model_99_0_0.5'

    dir_results = '../experiments/graph_multi_video_same_i3dF/graph_multi_video_same_i3dF_aft_nonlin_HT_l2_non_lin_None_sparsify_False_graph_size_2_method_cos_zero_self_deno_8_n_classes_20_in_out_2048_2_graph_sum_True_ucf/all_classes_False_just_primary_False_limit_None_cw_True_MultiCrossEntropyMultiBranchWithL1_100_step_100_0.1_0.001_0.001_lw_1.00_0.50_ABS_bias_wb/results_model_99_0_0.5'

    feat_dir = os.path.join(dir_results, 'outf')
    out_dir = os.path.join(dir_results, 'viz_outf')
    util.mkdir(out_dir)

    class_idx_keep = range(10, 16)

    plot_classes(feat_dir, gt_vec_dir, out_dir, class_idx_keep)

    return

    g_str = 'graph_2_nononlin_b'
    lin_str = 'lin_2_nononlin'
    graph_feat_dir = os.path.join('../scratch', g_str)
    lin_feat_dir = os.path.join('../scratch', lin_str)
    out_dir = '../scratch/comparing_features'
    util.mkdir(out_dir)

    vid_names = glob.glob(os.path.join(graph_feat_dir, '*.npy'))
    vid_names = [os.path.split(file_curr)[1][:-4] for file_curr in vid_names]

    graph_features = [[] for i in range(20)]
    lin_features = [[] for i in range(20)]

    for vid_name in vid_names:
        gt_file = os.path.join(gt_vec_dir, vid_name + '.npz')
        npz_data = np.load(gt_file)
        gt_vecs = npz_data['gt_vecs']
        gt_classes = npz_data['gt_classes']

        graph_data = np.load(os.path.join(graph_feat_dir, vid_name + '.npy'))
        lin_data = np.load(os.path.join(lin_feat_dir, vid_name + '.npy'))

        if gt_classes.size > 1:
            continue

        bin_curr = gt_vecs[0] > 0
        graph_data_rel = graph_data[bin_curr, :]
        lin_data_rel = lin_data[bin_curr, :]
        print graph_data_rel.shape
        print lin_data_rel.shape
        class_curr = int(gt_classes[0])

        lin_features[class_curr].append(lin_data_rel)
        graph_features[class_curr].append(graph_data_rel)

    # lin_features = [np.concatenate(lin_feat,axis = 0) for lin_feat in lin_features]
    # graph_features = [np.concatenate(graph_feat,axis = 0) for graph_feat in graph_features]

    titles = [lin_str, g_str]
    class_names_curr = class_names[10:16]
    lin_features = lin_features[10:16]
    graph_features = graph_features[10:16]

    for idx_features, features in enumerate([lin_features, graph_features]):
        class_names_keep = [
            class_names_curr[idx] for idx, f in enumerate(features)
            if len(f) > 0
        ]
        features = [np.concatenate(f, axis=0) for f in features if len(f) > 0]

        xAndYs = [(f[:, 0], f[:, 1]) for f in features]

        title = titles[idx_features]
        out_file = os.path.join(out_dir, title + '.jpg')
        xlabel = 'x'
        ylabel = 'y'
        legend_entries = class_names_keep
        visualize.plotSimple(xAndYs,
                             out_file=out_file,
                             title=title,
                             xlabel=xlabel,
                             ylabel=ylabel,
                             legend_entries=legend_entries,
                             outside=True,
                             noline=True)
        print out_file
Example #9
def viz_overlap(out_dir_meta, det_vid_names, det_conf_all,
                det_time_intervals_all, det_events_class_all, out_shapes):

    class_names = [
        'BaseballPitch', 'BasketballDunk', 'Billiards', 'CleanAndJerk',
        'CliffDiving', 'CricketBowling', 'CricketShot', 'Diving',
        'FrisbeeCatch', 'GolfSwing', 'HammerThrow', 'HighJump', 'JavelinThrow',
        'LongJump', 'PoleVault', 'Shotput', 'SoccerPenalty', 'TennisSwing',
        'ThrowDiscus', 'VolleyballSpiking'
    ]
    class_names.sort()

    aps = np.zeros((len(class_names) + 1, 5))
    overlap_thresh_all = np.arange(0.1, 0.6, 0.1)

    for idx_class_name, class_name in enumerate(class_names):
        # if idx_class_name<6:
        #     continue
        out_dir = os.path.join(out_dir_meta, class_name)
        util.mkdir(out_dir)

        mat_file = os.path.join('../TH14evalkit', 'mat_files',
                                class_name + '_test.mat')

        loaded = scipy.io.loadmat(mat_file)

        gt_vid_names_all = loaded['gtvideonames'][0]
        gt_class_names = loaded['gt_events_class'][0]

        gt_time_intervals = loaded['gt_time_intervals'][0]

        arr_meta = [gt_vid_names_all, gt_class_names]
        arr_out = []
        for arr_curr in arr_meta:
            arr_curr = [str(a[0]) for a in arr_curr]
            arr_out.append(arr_curr)

        [gt_vid_names_all, gt_class_names] = arr_out
        gt_time_intervals_all = np.array([a[0] for a in gt_time_intervals])

        gt_vid_names = list(
            np.unique(
                np.array(gt_vid_names_all)[np.array(gt_class_names) ==
                                           class_name]))
        det_vid_names = np.array(det_vid_names)
        # print len(det_vid_names)
        # print np.unique(det_vid_names).shape
        # print gt_vid_name
        # print len(gt_vid_names)

        for gt_vid_name in gt_vid_names:
            bin_keep = det_vid_names == gt_vid_name
            # if np.sum(bin_keep):
            #    print idx_class_name, np.sum(bin_keep)
            # print gt_vid_name
            # print np.sum(det_events_class_all==idx_class_name)
            # raw_input()
            bin_keep = np.logical_and(bin_keep,
                                      det_events_class_all == idx_class_name)
            if np.sum(bin_keep) == 0:
                print 'Continuing'
                continue
            gt_time_intervals = gt_time_intervals_all[np.array(
                gt_vid_names_all) == gt_vid_name]

            det_conf = det_conf_all[bin_keep]
            det_time_intervals = det_time_intervals_all[bin_keep, :]

            out_shape_curr = out_shapes[bin_keep]
            assert len(np.unique(out_shape_curr)) == 1
            out_shape_curr = np.unique(out_shape_curr)[0]
            # print out_shape_curr

            det_time_intervals_merged = det_time_intervals

            det_times = det_time_intervals[:, 0]

            det_times = np.array(range(0, out_shape_curr + 1)) * 16. / 25.

            gt_vals = np.zeros(det_times.shape)
            # print gt_time_intervals.shape
            # print det_times.shape

            for gt_time_curr in gt_time_intervals:
                idx_start = np.argmin(np.abs(det_times - gt_time_curr[0]))
                idx_end = np.argmin(np.abs(det_times - gt_time_curr[1]))
                gt_vals[idx_start:idx_end] = np.max(det_conf)

            det_vals = np.zeros(det_times.shape)
            for idx_det_time_curr, det_time_curr in enumerate(
                    det_time_intervals_merged):
                idx_start = np.argmin(np.abs(det_times - det_time_curr[0]))
                idx_end = np.argmin(np.abs(det_times - det_time_curr[1]))
                det_vals[idx_start:idx_end] = det_conf[idx_det_time_curr]

            out_file_curr = os.path.join(out_dir, gt_vid_name + '.jpg')

            visualize.plotSimple([(det_times, det_vals), (det_times, gt_vals)],
                                 out_file=out_file_curr,
                                 title='det conf over time',
                                 xlabel='time',
                                 ylabel='det conf',
                                 legend_entries=['Det', 'GT'])

        visualize.writeHTMLForFolder(out_dir)
Example #10
def script_debug_old_testing():
    print 'hello'

    train = False

    det_file = '../scratch/debug_det.npz'
    out_dir_meta = '../scratch/seeing_dets'
    util.mkdir(out_dir_meta)

    det_data = np.load(det_file)

    det_vid_names = det_data['det_vid_names']
    det_conf_all = det_data['det_conf']
    det_time_intervals_all = det_data['det_time_intervals']
    print det_vid_names.shape, det_vid_names[0], det_vid_names[-1]
    print det_conf_all.shape, np.min(det_conf_all), np.max(det_conf_all)
    print det_time_intervals_all.shape, np.min(det_time_intervals_all), np.max(
        det_time_intervals_all)

    class_names = [
        'BaseballPitch', 'BasketballDunk', 'Billiards', 'CleanAndJerk',
        'CliffDiving', 'CricketBowling', 'CricketShot', 'Diving',
        'FrisbeeCatch', 'GolfSwing', 'HammerThrow', 'HighJump', 'JavelinThrow',
        'LongJump', 'PoleVault', 'Shotput', 'SoccerPenalty', 'TennisSwing',
        'ThrowDiscus', 'VolleyballSpiking'
    ]
    class_names.sort()

    aps = np.zeros((len(class_names) + 1, 5))
    overlap_thresh_all = np.arange(0.1, 0.6, 0.1)

    for idx_class_name, class_name in enumerate(class_names):
        # if idx_class_name<6:
        #     continue
        out_dir = os.path.join(out_dir_meta, class_name)
        util.mkdir(out_dir)

        if train:
            mat_file = os.path.join('../TH14evalkit', class_name + '.mat')
        else:
            mat_file = os.path.join('../TH14evalkit', 'mat_files',
                                    class_name + '_test.mat')

        loaded = scipy.io.loadmat(mat_file)

        gt_vid_names_all = loaded['gtvideonames'][0]
        gt_class_names = loaded['gt_events_class'][0]

        gt_time_intervals = loaded['gt_time_intervals'][0]

        arr_meta = [gt_vid_names_all, gt_class_names]
        arr_out = []
        for arr_curr in arr_meta:
            arr_curr = [str(a[0]) for a in arr_curr]
            arr_out.append(arr_curr)

        [gt_vid_names_all, gt_class_names] = arr_out
        gt_time_intervals_all = np.array([a[0] for a in gt_time_intervals])

        gt_vid_names = list(
            np.unique(
                np.array(gt_vid_names_all)[np.array(gt_class_names) ==
                                           class_name]))
        print class_name, len(gt_vid_names)

        for gt_vid_name in gt_vid_names:
            print gt_vid_name
            bin_keep = det_vid_names == gt_vid_name
            gt_time_intervals = gt_time_intervals_all[np.array(
                gt_vid_names_all) == gt_vid_name]

            # print det_vid_names[bin_keep]
            print det_conf_all.shape, idx_class_name, bin_keep.shape
            det_conf = det_conf_all[bin_keep, idx_class_name]
            det_time_intervals = det_time_intervals_all[bin_keep, :]

            thresh = np.max(det_conf) - (np.max(det_conf) -
                                         np.min(det_conf)) * 0.5
            bin_second_thresh = det_conf > thresh

            det_conf, det_time_intervals_merged = merge_detections(
                bin_second_thresh, det_conf, det_time_intervals)

            # det_conf[det_conf<thresh]=0

            det_times = det_time_intervals[:, 0]

            gt_vals = np.zeros(det_times.shape)
            for gt_time_curr in gt_time_intervals:
                idx_start = np.argmin(np.abs(det_times - gt_time_curr[0]))
                idx_end = np.argmin(np.abs(det_times - gt_time_curr[1]))
                gt_vals[idx_start:idx_end] = np.max(det_conf)

            det_vals = np.zeros(det_times.shape)
            for idx_det_time_curr, det_time_curr in enumerate(
                    det_time_intervals_merged):
                idx_start = np.argmin(np.abs(det_times - det_time_curr[0]))
                idx_end = np.argmin(np.abs(det_times - det_time_curr[1]))
                det_vals[idx_start:idx_end] = det_conf[idx_det_time_curr]

            out_file_curr = os.path.join(out_dir,
                                         'dets_' + gt_vid_name + '_merged.jpg')

            visualize.plotSimple([(det_times, det_vals), (det_times, gt_vals)],
                                 out_file=out_file_curr,
                                 title='det conf over time',
                                 xlabel='time',
                                 ylabel='det conf',
                                 legend_entries=['Det', 'GT'])

            # print out_file_curr
            # raw_input()

        visualize.writeHTMLForFolder(out_dir)
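
merge_detections above is not shown; it apparently fuses runs of consecutive above-threshold detections into single intervals. A hypothetical sketch (the real helper may score or bound the runs differently):

import numpy as np


def merge_detections_sketch(bin_keep, det_conf, det_time_intervals):
    # fuse runs of consecutive detections flagged in bin_keep into single
    # intervals, keeping the max confidence of each run
    merged_conf, merged_intervals = [], []
    idx, n = 0, len(det_conf)
    while idx < n:
        if not bin_keep[idx]:
            idx += 1
            continue
        start = idx
        while idx < n and bin_keep[idx]:
            idx += 1
        merged_conf.append(float(np.max(det_conf[start:idx])))
        merged_intervals.append(
            [det_time_intervals[start][0], det_time_intervals[idx - 1][1]])
    return np.array(merged_conf), np.array(merged_intervals)

Example #11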
def plot_classes(feat_dir, gt_vec_dir, out_dir, class_idx_keep):
    # g_str = 'graph_2_nononlin_b'
    # lin_str = 'lin_2_nononlin'
    # graph_feat_dir = os.path.join('../scratch',g_str)
    # lin_feat_dir = os.path.join('../scratch',lin_str)
    # out_dir = '../scratch/comparing_features'
    n_classes = len(class_names)

    if class_idx_keep is None:
        class_idx_keep = range(n_classes)

    util.mkdir(out_dir)

    vid_names = glob.glob(os.path.join(feat_dir, '*.npy'))
    vid_names = [os.path.split(file_curr)[1][:-4] for file_curr in vid_names]

    graph_features = [[] for i in range(n_classes)]

    for vid_name in vid_names:
        gt_file = os.path.join(gt_vec_dir, vid_name + '.npz')
        npz_data = np.load(gt_file)
        gt_vecs = npz_data['gt_vecs']
        gt_classes = npz_data['gt_classes']

        graph_data = np.load(os.path.join(feat_dir, vid_name + '.npy'))

        for idx_gt, gt_vec in enumerate(gt_vecs):
            class_curr = int(gt_classes[idx_gt])
            if class_curr not in class_idx_keep:
                continue
            bin_curr = gt_vec > 0
            graph_data_rel = graph_data[bin_curr, :]

            graph_features[class_curr].append(graph_data_rel)

    class_names_curr = np.array(class_names)[class_idx_keep]
    graph_features = [graph_features[idx] for idx in class_idx_keep]

    class_names_keep = [
        class_names_curr[idx] for idx, f in enumerate(graph_features)
        if len(f) > 0
    ]
    features = [
        np.concatenate(f, axis=0) for f in graph_features if len(f) > 0
    ]

    xAndYs = [(f[:, 0], f[:, 1]) for f in features]

    title = 'Features'
    out_file = '_'.join(class_names_keep) + '.jpg'
    out_file = os.path.join(out_dir, out_file)

    xlabel = 'x'
    ylabel = 'y'
    legend_entries = class_names_keep
    visualize.plotSimple(xAndYs,
                         out_file=out_file,
                         title=title,
                         xlabel=xlabel,
                         ylabel=ylabel,
                         legend_entries=legend_entries,
                         outside=True,
                         noline=True)
    print out_file
Example #12
def script_viewing_sim():

    dir_files = '../data/ucf101/train_test_files'
    n_classes = 20
    train_file = os.path.join(dir_files, 'train_just_primary.txt')
    test_file = os.path.join(dir_files, 'test_just_primary.txt')

    out_dir = '../scratch/debugging_graph_self1'
    util.mkdir(out_dir)

    train_lines = util.readLinesFromFile(test_file)
    train_npy = [line_curr.split(' ') for line_curr in train_lines]
    for line_curr in train_lines:
        line_curr = line_curr.split(' ')
        npy_file = line_curr[0]
        anno = [int(val) for val in line_curr[1:]]
        anno = np.array(anno)
        assert np.sum(anno) == 1
        class_idx = np.where(anno)[0][0]

        out_dir_curr = os.path.join(out_dir, class_names[class_idx])
        util.mkdir(out_dir_curr)

        features = np.load(npy_file)
        out_shape_curr = features.shape[0]
        vid_name = os.path.split(npy_file)[1]
        vid_name = vid_name[:vid_name.rindex('.')]

        sim_mat = get_similarity(features)
        gt_vals, det_times = get_gt_vector(vid_name, out_shape_curr, class_idx)

        # idx_pos = np.where(gt_vals>0)[0]

        idx_pos = gt_vals > 0
        idx_neg = gt_vals < 1
        # print idx_pos
        sim_pos_all = []
        sim_neg_all = []
        for idx_idx_curr, idx_curr in enumerate(np.where(idx_pos)[0]):

            sim_pos = sim_mat[idx_curr, idx_pos]
            sim_neg = sim_mat[idx_curr, idx_neg]
            sim_pos_all.append(sim_pos[np.newaxis, :])
            sim_neg_all.append(sim_neg[np.newaxis, :])

            # idx_pos_leave = np.in1d
            # sim_rel = sim_mat[idx_curr, idx_pos]
            # print sim_rel.shape
            # print sim_rel
            # print sim_rel[idx_idx_curr]
            # print np.min(sim_rel), np.max(sim_rel), np.mean(sim_rel)
            # sim_rel = sim_mat[idx_curr, :]
            # print sim_rel.shape
            # print np.min(sim_rel), np.max(sim_rel), np.mean(sim_rel)

        sim_pos_all = np.concatenate(sim_pos_all, axis=0)
        sim_neg_all = np.concatenate(sim_neg_all, axis=0)
        # print sim_pos_all.shape
        # print sim_neg_all.shape

        sim_pos_mean = np.mean(sim_pos_all, axis=0)
        sim_neg_mean = np.mean(sim_neg_all, axis=0)

        pos_vals = np.zeros(gt_vals.shape)
        pos_vals[gt_vals > 0] = sim_pos_mean
        neg_vals = np.zeros(gt_vals.shape)
        neg_vals[gt_vals < 1] = sim_neg_mean

        arr_plot = [(det_times, curr_arr)
                    for curr_arr in [gt_vals, pos_vals, neg_vals]]
        legend_entries = ['gt', 'pos', 'neg']
        out_file_curr = os.path.join(out_dir_curr, vid_name + '.jpg')
        title = vid_name

        visualize.plotSimple(arr_plot,
                             out_file=out_file_curr,
                             title=title,
                             xlabel='time',
                             ylabel='max sim',
                             legend_entries=legend_entries)
        print out_file_curr
        # print sim_pos_mean
        # print sim_neg_mean

        # break

    for class_name in class_names:
        out_dir_curr = os.path.join(out_dir, class_name)
        visualize.writeHTMLForFolder(out_dir_curr)
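
get_similarity is not shown; the 'method_cos' tag in the experiment paths suggests pairwise cosine similarity over time steps. A sketch under that assumption:

import numpy as np


def get_similarity_sketch(features):
    # cosine similarity between every pair of rows of a (T, D) feature matrix
    norms = np.linalg.norm(features, axis=1, keepdims=True)
    normed = features / np.maximum(norms, 1e-8)
    return np.dot(normed, normed.T)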
Example #13
def train_model(out_dir_train,
                train_file,
                test_file,
                data_transforms,
                batch_size=None,
                batch_size_val=None,
                num_epochs=100,
                save_after=20,
                disp_after=1,
                plot_after=10,
                test_after=1,
                lr=0.0001,
                dec_after=100,
                model_name='alexnet'):

    util.mkdir(out_dir_train)
    log_file = os.path.join(out_dir_train, 'log.txt')
    plot_file = os.path.join(out_dir_train, 'loss.jpg')
    log_arr = []
    plot_arr = [[], []]
    plot_val_arr = [[], []]

    train_data = dataset.Horse_Image_Dataset(train_file,
                                             data_transforms['train'])
    test_data = dataset.Horse_Image_Dataset(test_file, data_transforms['val'])

    if batch_size is None:
        batch_size = len(train_data)

    if batch_size_val is None:
        batch_size_val = len(test_data)

    train_dataloader = torch.utils.data.DataLoader(train_data,
                                                   batch_size=batch_size,
                                                   shuffle=True,
                                                   num_workers=0)

    test_dataloader = torch.utils.data.DataLoader(test_data,
                                                  batch_size=batch_size_val,
                                                  shuffle=False,
                                                  num_workers=0)

    class_weights = get_class_weights(util.readLinesFromFile(train_file))

    torch.cuda.device(0)
    iter_begin = 0

    network = models.get('caps_alexnet_simple')

    model = network.model.cuda()
    # model.train(True)
    # criterion = nn.CrossEntropyLoss(weight = torch.FloatTensor(class_weights).cuda())

    optimizer = optim.Adam(network.get_lr_list(lr), lr=0)

    if dec_after is not None:
        exp_lr_scheduler = lr_scheduler.StepLR(optimizer,
                                               step_size=dec_after,
                                               gamma=0.1)

    for num_epoch in range(num_epochs):

        for num_iter_train, batch in enumerate(train_dataloader):

            # print batch['image'].shape,torch.min(batch['image']),torch.max(batch['image'])
            # im = np.transpose(batch['image'][0].numpy(),(1,2,0))
            # im = batch['image'][0].numpy()

            # print im.shape
            # scipy.misc.imsave('../scratch/check.jpg',im)
            # raw_input()
            data = Variable(batch['image'].cuda())
            one_hot = models.utils.one_hot_encode(batch['label'], 2)
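            # scale the one-hot targets by per-class weights to balance the loss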
            loss_weights = torch.FloatTensor(
                np.tile(
                    np.array(class_weights)[np.newaxis, :],
                    (one_hot.shape[0], 1)))
            one_hot = torch.mul(one_hot, loss_weights)
            labels = Variable(one_hot).cuda()

            # labels = Variable(models.utils.one_hot_encode(batch['label'],2)).cuda()
            output = model(data)  # output from DigitCaps (out_digit_caps)
            loss = model.loss(data, output,
                              labels)  # pass in data for image reconstruction
            optimizer.zero_grad()
            loss.backward()
            loss_iter = loss.data[0]
            optimizer.step()

            num_iter = num_epoch * len(train_dataloader) + num_iter_train
            # num_iter +=1
            plot_arr[0].append(num_iter)
            plot_arr[1].append(loss_iter)

            str_display = 'lr: %.6f, iter: %d, loss: %.4f' % (
                optimizer.param_groups[-1]['lr'], num_iter, loss_iter)
            log_arr.append(str_display)
            print str_display

            if num_iter % plot_after == 0 and num_iter > 0:
                util.writeFile(log_file, log_arr)
                if len(plot_val_arr[0]) == 0:
                    visualize.plotSimple([(plot_arr[0], plot_arr[1])],
                                         out_file=plot_file,
                                         title='Loss',
                                         xlabel='Iteration',
                                         ylabel='Loss',
                                         legend_entries=['Train'])
                else:
                    visualize.plotSimple([(plot_arr[0], plot_arr[1]),
                                          (plot_val_arr[0], plot_val_arr[1])],
                                         out_file=plot_file,
                                         title='Loss',
                                         xlabel='Iteration',
                                         ylabel='Loss',
                                         legend_entries=['Train', 'Val'])

        if num_epoch % test_after == 0:
            model.eval()
            for num_iter_test, batch in enumerate(test_dataloader):
                # data = Variable(batch['image'].cuda())
                # labels = Variable(torch.LongTensor(batch['label']).cuda())
                # loss = criterion(model(data), labels)
                # loss_iter = loss.data[0]

                data = Variable(batch['image'].cuda())
                # labels = Variable(models.utils.one_hot_encode(batch['label'],2)).cuda()

                one_hot = models.utils.one_hot_encode(batch['label'], 2)
                loss_weights = torch.FloatTensor(
                    np.tile(
                        np.array(class_weights)[np.newaxis, :],
                        (one_hot.shape[0], 1)))
                one_hot = torch.mul(one_hot, loss_weights)
                labels = Variable(one_hot).cuda()

                output = model(data)  # output from DigitCaps (out_digit_caps)
                loss = model.loss(
                    data, output,
                    labels)  # pass in data for image reconstruction
                # loss.backward()
                loss_iter = loss.data[0]
                # evaluation pass: no backward() or optimizer step here

                # test_epoch = num_epoch/test_after
                num_iter = num_epoch * len(train_dataloader) + num_iter_test
                # +=1
                #
                plot_val_arr[0].append(num_iter)
                plot_val_arr[1].append(loss_iter)

                str_display = 'lr: %.6f, val iter: %d, val loss: %.4f' % (
                    optimizer.param_groups[-1]['lr'], num_iter, loss_iter)
                log_arr.append(str_display)
                print str_display
            model.train(True)

        if num_epoch % save_after == 0:
            out_file = os.path.join(out_dir_train,
                                    'model_' + str(num_epoch) + '.pt')
            print 'saving', out_file
            torch.save(model, out_file)

        if dec_after is not None:
            exp_lr_scheduler.step()

    out_file = os.path.join(out_dir_train, 'model_' + str(num_epoch) + '.pt')
    print 'saving', out_file
    torch.save(model, out_file)

    print plot_arr[0]

    util.writeFile(log_file, log_arr)
    if len(plot_val_arr[0]) == 0:
        visualize.plotSimple([(plot_arr[0], plot_arr[1])],
                             out_file=plot_file,
                             title='Loss',
                             xlabel='Iteration',
                             ylabel='Loss',
                             legend_entries=['Train'])
    else:
        visualize.plotSimple([(plot_arr[0], plot_arr[1]),
                              (plot_val_arr[0], plot_val_arr[1])],
                             out_file=plot_file,
                             title='Loss',
                             xlabel='Iteration',
                             ylabel='Loss',
                             legend_entries=['Train', 'Val'])
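
get_class_weights is not shown either. A plausible sketch is inverse-frequency weighting over the two classes, assuming train-file lines of the form '<image_path> <label>' (the real helper may normalize differently):

import numpy as np


def get_class_weights_sketch(train_lines, n_classes=2):
    # weight each class by the inverse of how often it appears
    counts = np.zeros(n_classes)
    for line in train_lines:
        counts[int(line.split(' ')[1])] += 1
    weights = counts.sum() / np.maximum(counts, 1.)
    return weights / weights.sum()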
Example #14
def visualizing_attention():

    model_file = '../experiments/graph_multi_video_multi_F_joint_train_normalize_True_True_non_lin_HT_sparsify_True_graph_size_2_deno_8_n_classes_20_in_out_2048_64_feat_dim_2048_64_method_cos_ucf/all_classes_False_just_primary_False_limit_500_cw_True_MultiCrossEntropyMultiBranch_300_step_300_0.1_0.001_0.001_lw_0.5_0.5_ABS/model_299.pt'

    model = torch.load(model_file).cuda()
    model.eval()

    train_data, test_train_data, test_data, n_classes, trim_preds = emb.get_data(
        'ucf', 500, False, just_primary=False, gt_vec=False)

    # test_data = train_data
    # test_bool = False
    # test_data.feature_limit = None

    batch_size = 1
    branch_to_test = 0
    out_dir_meta = model_file[:model_file.rindex('.')]
    out_dir_meta = out_dir_meta + '_visualizing_attention_max' + str(
        branch_to_test)
    util.mkdir(out_dir_meta)
    print out_dir_meta

    anno_file = test_data.anno_file

    vid_names, annos = readTrainTestFile(anno_file)

    test_dataloader = torch.utils.data.DataLoader(
        test_data,
        batch_size=batch_size,
        collate_fn=test_data.collate_fn,
        shuffle=False,
        num_workers=1)

    import torch.nn.functional as F
    preds = []
    labels = []

    for idx_data, data in enumerate(test_dataloader):

        gt_classes = np.where(annos[idx_data])[0]
        vid_name = os.path.split(vid_names[idx_data])[1]
        vid_name = vid_name[:vid_name.rindex('.')]

        # out_dir_curr = os.path.join(out_dir_meta,vid_name)
        # util.mkdir(out_dir_curr)

        label = data['label'].cpu().data.numpy()

        # affinity = model.get_similarity(data['features'],sparsify = True)

        x_all, pmf = model(data['features'], branch_to_test=branch_to_test)
        assert len(pmf) == 1

        x_all = torch.cat([x_all_curr.unsqueeze(0) for x_all_curr in x_all], 0)
        x_all = F.softmax(x_all, dim=1)
        x_all = x_all.data.cpu().numpy()
        # affinity = affinity.data.cpu().numpy()
        test_bool = True
        max_all = np.max(x_all, axis=1)

        #

        for gt_class in gt_classes:
            class_name_curr = class_names[gt_class]
            print class_name_curr
            out_dir_curr = os.path.join(out_dir_meta, class_name_curr)
            util.mkdir(out_dir_curr)

            # affinity_copy = np.array(affinity)
            x_rel = x_all[:, gt_class]
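            # threshold halfway between the min and max confidence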
            thresh = np.max(x_rel) - (np.max(x_rel) - np.min(x_rel)) * 0.5
            gt_vec, _ = get_gt_vector(vid_name,
                                      x_rel.shape[0],
                                      gt_class,
                                      test=test_bool)
            if np.sum(gt_vec) == 0:
                print 'we got an anno problem', vid_name
                continue

            out_file_curr = os.path.join(out_dir_curr, vid_name + '.jpg')

            # bin_keep = gt_vec.astype(int)
            # bin_keep_rot = np.roll(bin_keep, 1)
            # bin_keep_rot[0] = 0
            # diff = bin_keep - bin_keep_rot
            # # diff[-3]=1
            # idx_start_all = list(np.where(diff==1)[0])
            # idx_end_all = list(np.where(diff==-1)[0])
            # idx_borders = np.array(idx_start_all+idx_end_all)

            # # print idx_borders
            # # raw_input()
            # affinity_copy[:,idx_borders]=np.max(affinity_copy)
            # affinity_copy[idx_borders,:]=np.max(affinity_copy)

            gt_vec = gt_vec * np.max(x_rel)
            x_axis = range(x_rel.size)

            thresh = thresh * np.ones(x_rel.shape)

            # out_file_curr = os.path.join(out_dir_curr,'det_confs_'+class_names[gt_class]+'.jpg')

            visualize.plotSimple(
                [(x_axis, x_rel), (x_axis, gt_vec), (x_axis, thresh),
                 (x_axis, max_all)],
                out_file=out_file_curr,
                title=class_names[gt_class],
                xlabel='time',
                ylabel='det conf',
                legend_entries=['Det', 'GT', 'Thresh', 'Attn'])

            # out_file_mat = os.path.join(out_dir_curr,'mat_'+class_names[gt_class]+'.jpg')
            # visualize.saveMatAsImage(affinity_copy, out_file_mat)

        preds.append(F.softmax(pmf[0]).data.cpu().numpy())

        labels.append(label)
        visualize.writeHTMLForFolder(out_dir_curr)

    labels = np.concatenate(labels, axis=0)
    preds = np.concatenate(preds, axis=0)
    labels[labels > 0] = 1

    mean_ap = sklearn.metrics.average_precision_score(labels, preds)
    print mean_ap
def visualizing_threshes():
    anno_file = '../data/activitynet/train_test_files/val.txt'
    # res_dir = '../experiments/graph_multi_video_with_L1_retF/graph_multi_video_with_L1_retF_aft_nonlin_RL_L2_non_lin_None_sparsify_0.5_graph_size_2_sigmoid_False_graph_sum_True_deno_8_n_classes_100_in_out_2048_1024_feat_dim_2048_1024_feat_ret_True_method_cos_activitynet/all_classes_False_just_primary_False_limit_None_cw_True_BinaryCrossEntropyMultiBranchWithL1_CASL_250_step_250_0.1_0.001_0.001_0.001_lw_1.00_1.00_1.00__cwOld_MulNumClasses_numSim_64/results_model_249_original_class_0_0.5_-2/outf'
    # out_dir = '../scratch/looking_at_anet/viz_pred_gt'

    # res_dir = '../experiments/graph_multi_video_with_L1_retF/graph_multi_video_with_L1_retF_aft_nonlin_RL_L2_non_lin_None_sparsify_percent_0.5_graph_size_2_sigmoid_False_graph_sum_True_deno_8_n_classes_100_in_out_2048_1024_feat_dim_2048_1024_feat_ret_True_method_cos_activitynet/all_classes_False_just_primary_False_limit_None_cw_False_MultiCrossEntropyMultiBranchWithL1_CASL_250_step_250_0.1_0.001_0.001_0.001_lw_1.00_1.00_1.00_changingSparsityAbs_128/results_model_249_original_class_0_0.5_-2/outf'
    # dataset = 'anet'

    # res_dir = '../experiments/graph_multi_video_with_L1_retF/graph_multi_video_with_L1_retF_aft_nonlin_RL_L2_non_lin_None_sparsify_static_median_mean_graph_size_2_sigmoid_False_graph_sum_True_deno_8_n_classes_100_in_out_2048_1024_feat_dim_2048_1024_feat_ret_True_method_cos_activitynet/all_classes_False_just_primary_False_limit_None_cw_False_MultiCrossEntropyMultiBranchWithL1_CASL_250_step_250_0.1_0.001_0.001_0.001_lw_1.00_1.00_1.00_changingSparsityAbs_128/results_model_249_original_class_0_0.5_-2/outf'
    # res_dir = '../experiments/graph_multi_video_with_L1_retF/graph_multi_video_with_L1_retF_aft_nonlin_RL_L2_non_lin_None_sparsify_percent_0.5_graph_size_2_sigmoid_False_graph_sum_True_deno_8_n_classes_100_in_out_2048_1024_feat_dim_2048_1024_feat_ret_True_method_cos_activitynet/all_classes_False_just_primary_False_limit_None_cw_False_BinaryCrossEntropyMultiBranchWithL1_CASL_250_step_250_0.1_0.001_0.001_0.001_lw_1.00_1.00_1.00_changingSparsityAbs_128/results_model_249_original_class_-1_-4_-2/outf'
    # res_dir = '../experiments/graph_multi_video_with_L1_retF/graph_multi_video_with_L1_retF_aft_nonlin_RL_L2_non_lin_None_sparsify_percent_0.5_graph_size_2_sigmoid_False_graph_sum_True_deno_8_n_classes_100_in_out_2048_1024_feat_dim_2048_1024_feat_ret_True_method_cos_activitynet/all_classes_False_just_primary_False_limit_None_cw_False_BinaryCrossEntropyMultiBranchWithL1_CASL_250_step_250_0.1_0.001_0.001_0.001_lw_3.00_1.00_1.00_changingSparsityAbs_128/results_model_249_original_class_-1_-4_-2/outf'
    # res_dir = '../experiments/graph_multi_video_with_L1_retF/graph_multi_video_with_L1_retF_aft_nonlin_RL_L2_non_lin_None_sparsify_percent_0.5_graph_size_2_sigmoid_True_graph_sum_True_deno_8_n_classes_100_in_out_2048_1024_feat_dim_2048_1024_feat_ret_True_method_cos_activitynet/all_classes_False_just_primary_False_limit_None_cw_False_MultiCrossEntropyMultiBranchWithL1_CASL_250_step_250_0.1_0.001_0.001_0.001_lw_1.00_1.00_1.00_fs_diff_changingSparsityAbs_128/results_model_149_original_class_-1_-4_-2/outf'
    # res_dir = '../experiments/graph_multi_video_with_L1_retF/graph_multi_video_with_L1_retF_aft_nonlin_RL_L2_non_lin_None_sparsify_0.5_graph_size_2_sigmoid_True_graph_sum_True_deno_8_n_classes_100_in_out_2048_1024_feat_dim_2048_1024_feat_ret_True_method_cos_activitynet/all_classes_False_just_primary_False_limit_None_cw_False_MultiCrossEntropyMultiBranchWithL1_CASL_250_step_250_0.1_0.001_0.001_0.001_lw_1.00_1.00_1.00__classicwithsig_128/results_model_99_original_class_0_-0.1_-2/outf'
    # res_dir = '../experiments/graph_multi_video_with_L1_retF/graph_multi_video_with_L1_retF_aft_nonlin_RL_L2_non_lin_None_sparsify_percent_0.5_graph_size_2_sigmoid_True_graph_sum_True_deno_1_n_classes_100_in_out_2048_1024_feat_dim_2048_1024_feat_ret_True_method_cos_activitynet/all_classes_False_just_primary_False_limit_None_cw_False_MultiCrossEntropyMultiBranchWithL1_CASL_250_step_250_0.1_0.001_0.001_0.001_lw_1.00_1.00_1.00_changingSparsityAbs_128/results_model_249_original_class_-1_-0.1_-2/outf'
    # res_dir = '../experiments/graph_multi_video_with_L1_retF/graph_multi_video_with_L1_retF_aft_nonlin_RL_L2_non_lin_None_sparsify_percent_0.5_graph_size_2_sigmoid_False_graph_sum_True_deno_self_determination_n_classes_100_in_out_2048_1024_feat_dim_2048_1024_feat_ret_True_method_cos_activitynet/all_classes_False_just_primary_False_limit_None_cw_False_BinaryCrossEntropyMultiBranchWithL1_CASL_250_step_250_0.1_0.001_0.001_0.001_lw_1.00_1.00_1.00_mindeno8_changingSparsityAbs_128/results_model_249_original_class_-1_-2_-2/outf'
    # res_dir = '../experiments/graph_multi_video_with_L1_retF/graph_multi_video_with_L1_retF_aft_nonlin_RL_L2_non_lin_None_sparsify_percent_0.5_graph_size_2_sigmoid_True_graph_sum_True_deno_random_n_classes_100_in_out_2048_1024_feat_dim_2048_1024_feat_ret_True_method_cos_activitynet/all_classes_False_just_primary_False_limit_None_cw_False_MultiCrossEntropyMultiBranchWithL1_CASL_250_step_250_0.1_0.001_0.001_0.001_lw_1.00_1.00_1.00_changingSparsityAbs_128/results_model_149_original_class_-1_-0.1_-2/outf'
    # res_dir = '../experiments/graph_multi_video_with_L1_retF/graph_multi_video_with_L1_retF_aft_nonlin_RL_L2_non_lin_None_sparsify_percent_0.5_graph_size_2_sigmoid_False_graph_sum_True_deno_random_n_classes_100_in_out_2048_1024_feat_dim_2048_1024_feat_ret_True_method_cos_activitynet/all_classes_False_just_primary_False_limit_None_cw_False_BinaryCrossEntropyMultiBranchWithL1_CASL_250_step_250_0.1_0.001_0.001_0.001_lw_1.00_1.00_1.00_changingSparsityAbs_128/results_model_149_original_class_-1_0.5_-2/outf'
    # res_dir = '../experiments/graph_multi_video_with_L1_retF/graph_multi_video_with_L1_retF_aft_nonlin_RL_L2_non_lin_None_sparsify_percent_0.5_graph_size_2_sigmoid_False_graph_sum_True_deno_random_n_classes_100_in_out_2048_1024_feat_dim_2048_1024_feat_ret_True_method_cos_activitynet/all_classes_False_just_primary_False_limit_None_cw_False_BinaryCrossEntropyMultiBranchWithL1_CASL_250_step_250_0.1_0.001_0.001_0.001_lw_1.00_1.00_1.00_weighted_changingSparsityAbs_128/results_model_49_original_class_-1_-2_-2/outf'
    # dataset = 'anet'

    anno_file = '../data/ucf101/train_test_files/test.txt'
    # res_dir = '../experiments/graph_multi_video_with_L1_retF/graph_multi_video_with_L1_retF_aft_nonlin_RL_L2_non_lin_None_sparsify_percent_0.5_graph_size_2_sigmoid_False_graph_sum_True_deno_8_n_classes_20_in_out_2048_1024_feat_dim_2048_1024_feat_ret_True_method_cos_ucf/all_classes_False_just_primary_False_limit_None_cw_False_MultiCrossEntropyMultiBranchWithL1_CASL_250_step_250_0.1_0.001_0.001_0.001_lw_1.00_1.00_1.00_changingSparsityAbs_0/results_model_249_original_class_0_0.5_-2/outf'
    # res_dir = '../experiments/graph_multi_video_with_L1_retF/graph_multi_video_with_L1_retF_aft_nonlin_RL_L2_non_lin_None_sparsify_percent_0.5_graph_size_2_sigmoid_False_graph_sum_True_deno_8_n_classes_20_in_out_2048_1024_feat_dim_2048_1024_feat_ret_True_method_cos_ucf/all_classes_False_just_primary_False_limit_None_cw_False_BinaryCrossEntropyMultiBranchWithL1_CASL_250_step_250_0.1_0.001_0.001_0.001_lw_1.00_1.00_1.00_changingSparsityAbs_0/results_model_249_original_class_0.0_0.5_-2/outf'
    # res_dir = '../experiments/graph_multi_video_with_L1_retF/graph_multi_video_with_L1_retF_aft_nonlin_RL_L2_non_lin_None_sparsify_percent_0.5_graph_size_2_sigmoid_True_graph_sum_True_deno_8_n_classes_20_in_out_2048_1024_feat_dim_2048_1024_feat_ret_True_method_cos_ucf/all_classes_False_just_primary_False_limit_None_cw_False_MultiCrossEntropyMultiBranchWithL1_CASL_500_step_500_0.1_0.001_0.001_0.001_lw_1.00_1.00_1.00_changingSparsityAbs_0/results_model_249_original_class_0.0_0.5_-2/outf'
    res_dir = '../experiments/graph_multi_video_with_L1_retF/graph_multi_video_with_L1_retF_aft_nonlin_RL_L2_non_lin_None_sparsify_percent_0.5_graph_size_2_sigmoid_False_graph_sum_True_deno_8_n_classes_20_in_out_2048_1024_feat_dim_2048_1024_feat_ret_True_method_cos_ucf/all_classes_False_just_primary_False_limit_None_cw_False_BinaryCrossEntropyMultiBranchWithL1_CASL_250_step_250_0.1_0.001_0.001_0.001_lw_1.00_1.00_1.00_hardtanh01_changingSparsityAbs_0/results_model_249_original_class_-1_-0.1_-2/outf'
    dataset = 'ucf'

    fps_stuff = 16. / 25.
    threshold = 0.75

    anno_file = '../data/charades/train_test_files/i3d_charades_both_test.txt'
    # res_dir = '../experiments/graph_multi_video_with_L1_retF/graph_multi_video_with_L1_retF_aft_nonlin_RL_L2_non_lin_None_sparsify_percent_0.5_graph_size_2_sigmoid_True_graph_sum_True_deno_8_n_classes_157_in_out_2048_1024_feat_dim_2048_1024_feat_ret_True_method_cos_charades_i3d_charades_both/all_classes_False_just_primary_False_limit_None_cw_False_MultiCrossEntropyMultiBranchWithL1_CASL_250_step_250_0.1_0.001_0.001_0.001_lw_1.00_1.00_1.00_changingSparsityAbs_numSim_128/results_model_249_original_class_0_-0.1_-2/outf'
    res_dir = '../experiments/graph_multi_video_with_L1_retF_tanh/graph_multi_video_with_L1_retF_tanh_aft_nonlin_RL_L2_non_lin_None_sparsify_percent_0.5_graph_size_2_sigmoid_True_graph_sum_True_deno_8_n_classes_157_in_out_2048_1024_feat_dim_2048_1024_feat_ret_True_method_cos_charades_i3d_charades_both/all_classes_False_just_primary_False_limit_None_cw_False_MultiCrossEntropyMultiBranchWithL1_CASL_250_step_250_0.1_0.001_0.001_0.001_lw_1.00_1.00_1.00_changingSparsityAbs_numSim_128/results_model_249_original_class_justraw_0_-0.9_-2/outf'
    dataset = 'charades'
    # fps_stuff = 1./6.
    # out_dir = '../scratch/looking_at_'+dataset+'/viz_pred_gt_percent_abs_0.5_thresh_ztest_per_class_all'
    # out_dir = '../scratch/looking_at_'+dataset+'/viz_pred_gt_percent_abs_0.5_thresh_otsu_individual'
    # out_dir = '../scratch/looking_at_'+dataset+'/viz_pred_median_mean_thresh_otsu_individual'
    out_dir = '../scratch/looking_at_' + dataset + '/viz_pred_percent_0.5_mce_tanh'
    # _bce_3_1_1_249'
    util.makedirs(out_dir)

    # anno_npz = '../data/activitynet/gt_npys/val_pruned.npz'

    out_files = glob.glob(os.path.join(res_dir, '*.npy'))
    out_files = np.array(out_files)

    # class_thresholds = get_z_test_all(out_files, threshold)

    anno_files, labels = read_anno_file(anno_file)

    anno_jnames = np.array(
        [os.path.split(anno_file_curr)[1] for anno_file_curr in anno_files])
    out_jnames = np.array(
        [os.path.split(out_file)[1] for out_file in out_files])

    num_classes = labels.shape[1]

    bin_out_files = []
    for class_idx in range(num_classes):
        rel_bin = labels[:, class_idx] > 0
        rel_anno_jnames = anno_jnames[rel_bin]
        rel_bin_out_files = np.in1d(out_jnames, rel_anno_jnames)
        bin_out_files.append(rel_bin_out_files)

    # print len(bin_out_files), bin_out_files[0].shape, np.sum(bin_out_files[0])
    if dataset == 'anet':
        gt_vid_names, gt_class_names, gt_time_intervals = et.load_activitynet_gt(
            False)
        class_names = globals.class_names_activitynet
    elif dataset == 'ucf':
        gt_vid_names, gt_class_names, gt_time_intervals = et.load_ucf_gt(False)
        class_names = globals.class_names
    elif dataset == 'charades':
        # gt_vid_names, gt_class_names, gt_time_intervals = et.load_ucf_gt(False)
        # class_names = globals.class_names
        class_names = globals.class_names_charades
        gt_vid_names, gt_class_names, gt_time_intervals = et.load_charades_gt(
            False)
        # overlap_thresh_all = np.arange(0.1,0.2,0.1)
        # aps = np.zeros((len(class_names)+1,1))
        # fps_stuff = 16./25.

    gt_vid_names = np.array(gt_vid_names)
    gt_class_names = np.array(gt_class_names)
    gt_time_intervals = np.array(gt_time_intervals)

    # get threshold for each class
    # class_thresholds = get_min_max_all(out_files, threshold)
    # print class_thresholds.shape
    # return

    # return
    # class_thresholds = get_threshold_val(out_files, bin_out_files, threshold = threshold)

    n_bins = 10
    for class_idx, bin_class in enumerate(bin_out_files):
        rel_files = out_files[bin_class]
        rel_class_name = class_names[class_idx]

        out_dir_curr = os.path.join(out_dir, rel_class_name)
        util.mkdir(out_dir_curr)

        bin_class_gt = gt_class_names == rel_class_name

        for rel_file in rel_files:
            pred_vals = np.load(rel_file)[:, class_idx]

            max_det_conf = np.max(pred_vals)
            min_det_conf = np.min(pred_vals)

            old_thresh = min_det_conf + (max_det_conf - min_det_conf) / 2.
            # hist, bin_edges = np.histogram(pred_vals, n_bins)
            # otsu_val = otsu_method(hist)

            # print bin_edges, min_det_conf, max_det_conf
            # print otsu_val, np.argmax(otsu_val)
            # idx_max = np.argmax(otsu_val)
            # print bin_edges[idx_max]
            # new_thresh = (bin_edges[idx_max]+bin_edges[idx_max+1])/2
            # print new_thresh
            # raw_input()
            # # bin_edges = bin_edges[1:]

            # print hist.shape, bin_edges.shape, otsu_val.shape
            # new_thresh = bin_edges[np.argmax(otsu_val)]
            new_thresh = get_otsu_thresh(pred_vals, n_bins)

            # new_thresh = get_z_test(pred_vals)

            # new_thresh = class_thresholds[class_idx]

            out_shape_curr = len(pred_vals)

            rel_name = os.path.split(rel_file)[1][:-4]
            bin_vid = gt_vid_names == rel_name

            rel_gt_time = gt_time_intervals[np.logical_and(bin_vid, bin_class_gt)]
            # print pred_vals.shape
            # print rel_gt_time
            # print rel_name
            # raw_input()
            det_times = np.array(range(0, out_shape_curr)) * fps_stuff
            gt_vals = np.zeros(det_times.shape)

            for gt_time_curr in rel_gt_time:
                idx_start = np.argmin(np.abs(det_times - gt_time_curr[0]))
                idx_end = np.argmin(np.abs(det_times - gt_time_curr[1]))
                gt_vals[idx_start:idx_end] = max_det_conf

            gt_vals[gt_vals == 0] = min_det_conf

            out_file_viz = os.path.join(out_dir_curr, rel_name + '.jpg')
            out_file_hist = os.path.join(out_dir_curr, rel_name + '_hist.jpg')

            plot_arr = [(det_times, pred_vals), (det_times, gt_vals)]
            plot_arr += [(det_times, np.ones(det_times.shape) * old_thresh),
                         (det_times, np.ones(det_times.shape) * new_thresh)]
            legend_entries = ['Pred', 'GT', 'Old', 'New']
            title = 'Detection Confidence Over Time'
            # print out_file_viz

            visualize.hist(pred_vals,
                           out_file_hist,
                           bins=n_bins,
                           normed=True,
                           xlabel='Value',
                           ylabel='Frequency',
                           title=title)
            visualize.plotSimple(plot_arr,
                                 out_file=out_file_viz,
                                 title=title,
                                 xlabel='Time',
                                 ylabel='Detection Confidence',
                                 legend_entries=legend_entries)
            # raw_input()

        visualize.writeHTMLForFolder(out_dir_curr)
        print out_dir_curr
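
# get_otsu_thresh is defined elsewhere in this file; the commented-out lines
# above sketch the same idea. A minimal, self-contained version, assuming it
# runs standard Otsu on an n-bin histogram of the 1-D confidence values (the
# helper below is illustrative, not the exact implementation):
import numpy as np


def get_otsu_thresh_sketch(vals, n_bins):
    hist, bin_edges = np.histogram(vals, bins=n_bins)
    hist = hist.astype(float)
    centers = (bin_edges[:-1] + bin_edges[1:]) / 2.
    total = hist.sum()
    sum_all = (hist * centers).sum()
    w0, sum0 = 0., 0.
    best_var, best_idx = -1., 0
    for idx in range(n_bins - 1):
        w0 += hist[idx]
        w1 = total - w0
        if w0 == 0 or w1 == 0:
            continue
        sum0 += hist[idx] * centers[idx]
        mu0 = sum0 / w0
        mu1 = (sum_all - sum0) / w1
        # between-class variance of the two-way split after bin idx
        var_between = w0 * w1 * (mu0 - mu1)**2
        if var_between > best_var:
            best_var, best_idx = var_between, idx
    # threshold at the bin edge separating the two classes
    return bin_edges[best_idx + 1]
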
Example #16
def main():
    out_dir_figures = '../experiments/figures/mmi'
    util.makedirs(out_dir_figures)

    dir_exp_meta = '../experiments/khorrami_capsule_7_3_bigclass3'
    pre_split = 'mmi_96_'
    post_split = '_reconstruct_True_True_all_aug_margin_False_wdecay_0_300_exp_0.96_350_1e-06_0.001_0.001_0.001_lossweights_1.0_100.0'
    # '_reconstruct_True_True_all_aug_margin_False_wdecay_0_300_exp_0.96_350_1e-06_0.001_0.001_0.001'

    str_titles = ['Hard Training', 'Easy Training']
    range_splits = [0, 1]
    str_titles = ['Easy Training']
    range_splits = [1]
    range_tests = range(0, 300, 10) + [299]
    test_post = ''

    accus_us = []
    accus_hard = get_values_from_logs(dir_exp_meta, pre_split, post_split,
                                      range_splits, range_tests, test_post)
    test_post = '_easy'
    accus_easy = get_values_from_logs(dir_exp_meta, pre_split, post_split,
                                      range_splits, range_tests, test_post)
    accus_us = [accus_hard, accus_easy]

    dir_exp_meta = '../experiments/khorrami_ck_96_caps_bl'
    # 0_train_test_files_khorrami_ck_96_300_exp_0.96_350_1e-06_0.001_0.001/
    # '../experiments/khorrami_capsule_7_3_bigclass3'
    pre_split = 'mmi_96_'
    # 'mmi_96_'
    # /mmi_96_1
    post_split = '_train_test_files_khorrami_ck_96_300_exp_0.96_350_1e-06_0.001_0.001'

    test_post = ''
    accus_hard = get_values_from_logs(dir_exp_meta, pre_split, post_split,
                                      range_splits, range_tests, test_post)

    test_post = '_easy'
    accus_easy = get_values_from_logs(dir_exp_meta, pre_split, post_split,
                                      range_splits, range_tests, test_post)
    accus_them = [accus_hard, accus_easy]
    # print accus_hard
    # print accus_easy
    # raw_input()

    str_tests = ['Hard Test', 'Easy Test']
    for fold_curr, str_title in enumerate(str_titles):
        plot_vals = []
        legend_entries = []
        for idx_ease, str_test in enumerate(str_tests):
            # for idx_us,(us,them) in enumerate( zip(accus_us,accus_them)):
            plot_vals.append((range_tests, accus_us[idx_ease][fold_curr]))
            plot_vals.append((range_tests, accus_them[idx_ease][fold_curr]))
            legend_entries.extend(['Ours ' + str_test, 'BL ' + str_test])

        figure_name = str_title.lower().replace(' ', '_') + '.jpg'
        out_file_curr = os.path.join(out_dir_figures, figure_name)
        visualize.plotSimple(plot_vals,
                             out_file=out_file_curr,
                             title=str_title,
                             xlabel='Epoch',
                             ylabel='Accuracy',
                             legend_entries=legend_entries)
        print out_file_curr

    return

    dir_exp_meta = '../experiments/khorrami_capsule_7_33'
    pre_split = 'ck_96_'
    post_split = '_reconstruct_True_True_all_aug_margin_False_wdecay_0_600_step_600_0.1_0.001_0.001_0.001'
    range_splits = range(10)
    print_accuracy(dir_exp_meta,
                   pre_split,
                   post_split,
                   range_splits,
                   log='log.txt')

    return

    dir_exp_meta = '../experiments/khorrami_caps_k7_s3_oulu'
    pre_split = 'oulu_single_'
    post_split = '_all_aug_nopool_300_step_150_0.1_0.001'
    num_splits = 10
    model_num = 299

    dir_exp_meta = '../experiments/caps_heavy_48'
    pre_split = 'oulu_single_'
    post_split = '_all_aug_nopool_200_step_100_0.1_0.001'
    num_splits = 10
    model_num = 199

    # dir_exp_meta = '../experiments/khorrami_caps_k7_s3_oulu_class_weights'
    # pre_split = 'oulu_single_im_'
    # post_split = '_all_aug_nopool_300_step_150_0.1_0.01'
    # num_splits = 1
    # model_num = 299

    # dir_exp_meta = '../experiments/khorrami_caps_k7_s3_oulu_class_weights'
    # pre_split = 'oulu_single_im_'
    # post_split = '_all_aug_nopool_300_step_150_0.1_0.0005'
    # num_splits = 1
    # model_num = 299

    dir_exp_meta = '../experiments/khorrami_caps_k7_s3_oulu_spread_0.2'
    pre_split = 'oulu_single_im_'
    post_split = '_all_aug_nopool_300_step_150_0.1_0.001'
    num_splits = 1
    model_num = 299

    dir_exp_meta = '../experiments/khorrami_caps_k7_s3_oulu_spread_0.2_vl_gray_r_1_init/'
    pre_split = 'oulu_three_im_no_neutral_just_strong_'
    post_split = '_all_aug_max_300_step_300_0.1_0.001'
    num_splits = 10
    model_num = 299

    num_classes = 6
    class_labels = [
        'Anger', 'Disgust', 'Fear', 'Happiness', 'Sadness', 'Surprise'
    ]

    ######
    dir_exp_meta = '../experiments/khorrami_caps_k7_s3_oulu_spread_0.2_vl_gray_r_3_init_correct_out'
    num_splits = range(10)

    dir_exp_meta = '../experiments/khorrami_caps_k7_s3_oulu_spread_0.2_vl_gray_r_2_init_correct_out'
    num_splits = [0, 1]

    dir_exp_meta = '../experiments/khorrami_caps_k7_s3_oulu_spread_0.2_vl_gray_r_1_init_correct_out'
    num_splits = range(10)

    pre_split = 'oulu_three_im_no_neutral_just_strong_'
    post_split = '_all_aug_max_300_step_300_0.1_0.001'
    model_num = 299
    ######

    dir_exp_meta = '../experiments/oulu_r3_hopeful'
    num_splits = range(10)

    # dir_exp_meta = '../experiments/khorrami_caps_k7_s3_oulu_spread_0.2_vl_gray_r_2_init_correct_out'
    # num_splits = [0,1]

    # dir_exp_meta = '../experiments/khorrami_caps_k7_s3_oulu_spread_0.2_vl_gray_r_1_init_correct_out'
    # num_splits = range(10)

    pre_split = 'oulu_three_im_no_neutral_just_strong_True_'
    post_split = '_wdecay_all_aug_max_500_step_500_0.1_0.0001'

    # for model_num in ['499','499_center']:
    # ,'bestVal','bestVal_center']:
    # print 'MODEL TYPE',model_num
    # log = 'log_test_center.txt' if model_num.endswith('_center') else 'log_test.txt'
    # get_per_label_accuracy(dir_exp_meta,pre_split,post_split,num_splits,model_num,class_labels)
    # print_accuracy(dir_exp_meta,pre_split,post_split,num_splits,log = log)
    # view_loss_curves(dir_exp_meta,pre_split,post_split,num_splits,model_num)

    ####

    dir_exp_meta = '../experiments/oulu_vgg_r1_noinit_preprocessed'
    num_splits = range(10)

    # dir_exp_meta = '../experiments/khorrami_caps_k7_s3_oulu_spread_0.2_vl_gray_r_2_init_correct_out'
    # num_splits = [0,1]

    # dir_exp_meta = '../experiments/khorrami_caps_k7_s3_oulu_spread_0.2_vl_gray_r_1_init_correct_out'
    # num_splits = range(10)
    pre_splits = ['oulu_three_im_no_neutral_just_strong_False_'] * 3
    model_names = [
        'vgg_capsule_disfa', 'vgg_capsule_disfa_bigprimary',
        'vgg_capsule_disfa_bigclass'
    ]
    post_splits = [
        '_' + model_name + '_all_aug_wdecay_0_50_step_50_0.1_1e-05_0.0001'
        for model_name in model_names
    ]

    for pre_split, post_split in zip(pre_splits, post_splits):
        for model_num in ['49']:
            print 'MODEL TYPE', model_num
            get_per_label_accuracy(dir_exp_meta, pre_split, post_split,
                                   num_splits, model_num, class_labels)
            print_accuracy(dir_exp_meta, pre_split, post_split, num_splits)
            view_loss_curves(dir_exp_meta, pre_split, post_split, num_splits,
                             model_num)
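
# get_values_from_logs, print_accuracy, get_per_label_accuracy, and
# view_loss_curves are helpers defined elsewhere in this file. A minimal
# sketch of the parsing they likely share, based on the 'val accuracy' lines
# read in overfitting() further below (the helper name is illustrative):
def read_val_accuracies_sketch(log_file):
    # keep the trailing float of every 'val accuracy' line in the log
    lines = util.readLinesFromFile(log_file)
    return [
        float(line_curr.split(' ')[-1]) for line_curr in lines
        if 'val accuracy' in line_curr
    ]
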
Example #17
def get_distance_from_perfect(model_file, graph_num):

    out_dir_meta = model_file[:model_file.rindex('.')]
    out_dir_meta_meta = out_dir_meta + '_graph_etc'

    out_dir_meta = out_dir_meta_meta + '_' + str(graph_num)
    out_dir_viz = out_dir_meta + '_dist_perfectG'
    print out_dir_viz

    util.mkdir(out_dir_viz)
    assert os.path.exists(out_dir_meta)
    vid_files = glob.glob(os.path.join(out_dir_meta, '*validation*.npz'))

    class_collations = [[] for idx in range(len(class_names))]
    class_collations_pos = [[] for idx in range(len(class_names))]
    viz = True
    threshes = np.arange(0.1, 1.1, 0.1)
    print threshes

    for vid_file in vid_files:
        print vid_file
        npz_data = np.load(vid_file)
        vid_file = os.path.split(vid_file)[1]
        affinity = npz_data['affinity']

        gt_vecs = npz_data['gt_vecs']
        gt_classes = npz_data['gt_classes']

        if viz:
            out_file = os.path.join(
                out_dir_viz, vid_file[:vid_file.rindex('.')] + '_' +
                str(graph_num) + '.jpg')
            visualize.saveMatAsImage(affinity, out_file)

            plotter = []
            legend_entries = []
            for gt_idx, gt_class in enumerate(gt_classes):
                gt_vec = gt_vecs[gt_idx]
                gt_vec = gt_vec / np.max(gt_vec)
                gt_vec = gt_vec * (gt_idx + 1)
                x_axis = range(gt_vec.size)
                plotter.append((x_axis, gt_vec))
                legend_entries.append(class_names[gt_class])

            out_file = os.path.join(
                out_dir_viz, vid_file[:vid_file.rindex('.')] + '_gt.jpg')
            visualize.plotSimple(plotter,
                                 out_file=out_file,
                                 xlabel='time',
                                 ylabel='',
                                 legend_entries=legend_entries)

        for idx_gt, gt_vec in enumerate(gt_vecs):
            gt_class = gt_classes[idx_gt]
            class_name = class_names[gt_class]
            gt_vec = gt_vec[:, np.newaxis]
            perfectG = np.dot(gt_vec, gt_vec.T)
            aff = np.array(affinity)
            aff_just_pos = aff * perfectG
            diff = get_l2_diff(aff, perfectG, threshes)
            diff_pos = get_l2_diff(aff_just_pos, perfectG, threshes)

            class_collations[gt_class].append(diff)
            class_collations_pos[gt_class].append(diff_pos)

            plotter = [(threshes, diff), (threshes, diff_pos)]
            legend_entries = ['All', 'Pos']
            out_file = os.path.join(
                out_dir_viz, vid_file[:vid_file.rindex('.')] + '_' +
                class_name + '_diff.jpg')
            visualize.plotSimple(plotter,
                                 out_file=out_file,
                                 xlabel='Thresh',
                                 ylabel='Diff',
                                 legend_entries=legend_entries)

            if viz:
                out_file = os.path.join(
                    out_dir_viz, vid_file[:vid_file.rindex('.')] + '_' +
                    class_name + '_perfectG.jpg')
                visualize.saveMatAsImage(perfectG, out_file)

        visualize.writeHTMLForFolder(out_dir_viz)

    for idx_class in range(len(class_names)):

        class_name = class_names[idx_class]
        cc = np.array(class_collations[idx_class])
        ccp = np.array(class_collations_pos[idx_class])
        cc = np.mean(cc, axis=0)
        ccp = np.mean(ccp, axis=0)
        plotter = [(threshes, cc), (threshes, ccp)]
        legend_entries = ['All', 'Pos']
        out_file = os.path.join(out_dir_viz,
                                'average_' + class_name + '_diff.jpg')
        visualize.plotSimple(plotter,
                             out_file=out_file,
                             title=class_name,
                             xlabel='Thresh',
                             ylabel='Diff',
                             legend_entries=legend_entries)

    visualize.writeHTMLForFolder(out_dir_viz)
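
# get_l2_diff is defined elsewhere in this file; a plausible sketch, assuming
# it binarizes the affinity matrix at each threshold and measures the
# normalized Frobenius distance to the perfect same-class graph (illustrative
# only, not the exact implementation):
import numpy as np


def get_l2_diff_sketch(aff, perfectG, threshes):
    diffs = []
    for thresh in threshes:
        aff_bin = (aff >= thresh).astype(float)
        diffs.append(np.linalg.norm(aff_bin - perfectG) / perfectG.size)
    return np.array(diffs)
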
Example #18
def overfitting():
    out_dir = '../experiments/figures/overfitting'
    util.makedirs(out_dir)

    # dirs = []
    # dir_meta = '../experiments/showing_overfitting_justhflip_khorrami_capsule_7_31'
    # dir_curr = os.path.join(dir_meta,'oulu_96_three_im_no_neutral_just_strong_False_0_reconstruct_False_True_all_aug_margin_False_wdecay_0_600_step_600_0.1_0.001_0.001')
    # dirs.append(dir_curr)
    # dir_meta = '../experiments/showing_overfitting_justhflip_khorrami_capsule_7_33'
    # dir_r3 = 'oulu_96_three_im_no_neutral_just_strong_False_0_reconstruct_False_True_all_aug_margin_False_wdecay_0_600_step_600_0.1_0.001_0.001'
    # dir_r3_lw_eq = 'oulu_96_three_im_no_neutral_just_strong_False_0_reconstruct_True_True_all_aug_margin_False_wdecay_0_600_step_600_0.1_0.001_0.001_0.001'
    # dir_r3_lw_b = 'oulu_96_three_im_no_neutral_just_strong_False_0_reconstruct_True_True_all_aug_margin_False_wdecay_0_600_step_600_0.1_0.001_0.001_0.001_lossweights_1.0_100.0'

    dirs = []
    # dir_meta = '../experiments/showing_overfitting_justhflip_khorrami_capsule_7_31'
    # dir_curr = os.path.join(dir_meta,'oulu_96_three_im_no_neutral_just_strong_False_9_reconstruct_False_True_all_aug_margin_False_wdecay_0_600_step_600_0.1_0.001_0.001')
    # dirs.append(dir_curr)
    dir_meta = '../experiments/showing_overfitting_justhflip_khorrami_capsule_7_33'
    dir_r3 = 'oulu_96_three_im_no_neutral_just_strong_False_9_reconstruct_False_True_all_aug_margin_False_wdecay_0_600_step_600_0.1_0.001_0.001'
    dir_r3_lw_eq = 'oulu_96_three_im_no_neutral_just_strong_False_9_reconstruct_True_True_all_aug_margin_False_wdecay_0_600_step_600_0.1_0.001_0.001_0.001_lossweights_1.0_1.0'
    dir_r3_lw_b = 'oulu_96_three_im_no_neutral_just_strong_False_9_reconstruct_True_True_all_aug_margin_False_wdecay_0_600_step_600_0.1_0.001_0.001_0.001_lossweights_1.0_100.0'

    dir_r3_do = '../experiments/showing_overfitting_justhflip_khorrami_capsule_7_3_with_dropout3/oulu_96_three_im_no_neutral_just_strong_False_9_reconstruct_False_True_all_aug_margin_False_wdecay_0_600_step_600_0.1_0.001_0.001_0.5'

    dirs_to_pend = [dir_r3, dir_r3_lw_eq, dir_r3_lw_b]

    for dir_curr in dirs_to_pend:
        dir_curr = os.path.join(dir_meta, dir_curr)
        dirs.append(dir_curr)

    dirs.append(dir_r3_do)

    window = 10
    val_lim = 600
    epoch_range = range(window - 1, val_lim)

    out_file = os.path.join(out_dir, 'val_accuracy_9_do.png')
    xAndYs = []
    legend_entries = ['R3+0', 'R3+1e-7', 'R3+1e-5', 'R3+DO']
    for dir_curr in dirs:
        log_file_curr = os.path.join(dir_curr, 'log.txt')
        val_losses = [
            line_curr for line_curr in util.readLinesFromFile(log_file_curr)
            if 'val accuracy' in line_curr
        ]
        val_losses = [
            float(line_curr.split(' ')[-1]) for line_curr in val_losses
        ]
        val_losses = val_losses[:val_lim]
        print len(val_losses)

        val_losses = np.convolve(val_losses,
                                 np.ones((window, )) / window,
                                 mode='valid')

        xAndYs.append((epoch_range, val_losses))
    visualize.plotSimple(xAndYs,
                         out_file=out_file,
                         xlabel='Epoch',
                         ylabel='Validation Accuracy',
                         legend_entries=legend_entries,
                         ylim=[0.6, 0.8],
                         outside=True)
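
# The 'valid'-mode moving average above returns len(val_losses) - window + 1
# points, which is why epoch_range starts at window - 1. A quick
# self-contained check of that alignment:
import numpy as np

vals = np.arange(600, dtype=float)
window = 10
smoothed = np.convolve(vals, np.ones((window, )) / window, mode='valid')
assert len(smoothed) == len(range(window - 1, 600))  # both have 591 points
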
Example #19
def trying_it_out():
    # dir_graphs = '../experiments/graph_multi_video_multi_F_joint_train_normalize_True_True_non_lin_HT_sparsify_True_graph_size_2_deno_8_n_classes_20_in_out_2048_64_feat_dim_2048_64_method_cos_ucf/all_classes_False_just_primary_False_limit_500_cw_True_MultiCrossEntropyMultiBranch_300_step_300_0.1_0.001_0.001_lw_0.5_0.5_ABS/model_299_graph_etc'

    # out_dir_meta = '../scratch/spanning_shenanigans'
    # util.mkdir(out_dir_meta)

    model_file = '../experiments/graph_multi_video_multi_F_joint_train_normalize_True_True_non_lin_HT_sparsify_True_graph_size_2_deno_8_n_classes_20_in_out_2048_64_64_feat_dim_2048_64_64_64_gk_2_method_cos_ucf/all_classes_False_just_primary_False_limit_500_cw_True_MultiCrossEntropyMultiBranch_300_step_300_0.1_0.001_0.001_lw_1_1_1_ABS/model_299.pt'

    graph_num = 1
    dir_graphs = model_file[:model_file.rindex('.')]
    dir_graphs = dir_graphs + '_graph_etc'
    if graph_num is not None:
        dir_graphs += '_' + str(graph_num)

    # out_dir_meta = '../scratch/spanning_shenanigans'
    out_dir_meta = os.path.join(
        os.path.split(dir_graphs)[0], 'spanning_shenanigans')
    util.mkdir(out_dir_meta)

    vid_name = 'video_test_0000273'
    # oracle = False
    # idx = 1
    # thresh = 0.85

    oracle = False
    thresh = 0.9999
    branch_pred = 1
    idx = 0

    str_dir = '_'.join([str(val) for val in [vid_name, oracle, idx, thresh]])
    out_dir_curr = os.path.join(out_dir_meta, str_dir)
    util.mkdir(out_dir_curr)

    npz_file = os.path.join(dir_graphs, vid_name + '.npz')
    npz_data = np.load(npz_file)
    gt_vecs = npz_data['gt_vecs']
    affinity = npz_data['affinity']
    x_all = npz_data['x_all']
    gt_classes = npz_data['gt_classes']

    gt_vec = gt_vecs[idx]
    gt_vec[gt_vec > 0] = 1

    gt_class = gt_classes[idx]

    nodes_kept, edge_threshes, gt_count, _ = get_spanned_nodes(
        gt_class,
        x_all[branch_pred],
        affinity,
        thresh,
        deno=8,
        oracle=oracle,
        gt_vec=gt_vec)

    precision_all = []
    recall_all = []

    for idx_n in range(len(nodes_kept)):
        idx_str = str(idx_n).zfill(4)
        out_file_curr = os.path.join(out_dir_curr, idx_str + '.jpg')

        nodes_kept_curr = nodes_kept[:idx_n + 1]
        pred = np.zeros(gt_vec.shape)
        pred[nodes_kept_curr] = 1

        precision = sklearn.metrics.precision_score(gt_vec, pred, labels=[1])
        recall = sklearn.metrics.recall_score(gt_vec, pred, labels=[1])
        # print precision, recall
        # raw_input()
        precision_all.append(precision)
        recall_all.append(recall)

        x_axis = np.array(range(gt_vec.size))
        title_curr = ' '.join(
            ['prec', '%.2f' % precision, 'rec',
             '%.2f' % recall])

        visualize.plotSimple([(x_axis, pred), (x_axis, gt_vec)],
                             out_file=out_file_curr,
                             title=title_curr,
                             xlabel='time',
                             ylabel='det conf',
                             legend_entries=['Det', 'GT'])

    out_file_curr = os.path.join(out_dir_curr, 'prec_rec.jpg')
    visualize.plotSimple([(recall_all, precision_all)],
                         out_file=out_file_curr,
                         title='prec_rec',
                         xlabel='Recall',
                         ylabel='Precision')

    visualize.writeHTMLForFolder(out_dir_curr)
    print out_dir_curr
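
# trying_it_out scores each growing node set as a frame-level binary mask
# against the ground truth. A small worked example of the sklearn calls used
# above (toy arrays, not data from this project):
import numpy as np
import sklearn.metrics

gt_toy = np.array([0, 1, 1, 0, 1])
pred_toy = np.array([0, 1, 0, 0, 1])
prec_toy = sklearn.metrics.precision_score(gt_toy, pred_toy, labels=[1])
rec_toy = sklearn.metrics.recall_score(gt_toy, pred_toy, labels=[1])
# prec_toy == 1.0 (both predicted positives are correct)
# rec_toy == 2 / 3. (two of the three positive frames are recovered)
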
Example #20
def overfitting_do():
    out_dir = '../experiments_dropout/figures/overfitting'
    util.makedirs(out_dir)

    dir_meta_r1 = '../experiments_dropout/khorrami_capsule_7_3_bigclass_with_dropout_1'
    dir_meta_r3 = '../experiments_dropout/khorrami_capsule_7_3_bigclass_with_dropout_3'
    dirs_post = [
        'oulu_96_train_test_files_preprocess_vl_9_reconstruct_False_none_600_step_600_0.1_0.001_0.001_0.0',
        'oulu_96_train_test_files_preprocess_vl_9_reconstruct_False_none_600_step_600_0.1_0.001_0.001_0.5',
        'oulu_96_train_test_files_preprocess_vl_9_reconstruct_False_hs_flip_600_step_600_0.1_0.001_0.001_0.0',
        'oulu_96_train_test_files_preprocess_vl_9_reconstruct_False_hs_flip_600_step_600_0.1_0.001_0.001_0.5'
    ]
    meta_legend_entries = ['R1', 'R3']
    sub_legend_entries = ['None 0', 'None 0.5', 'HS+Flip 0', 'HS+Flip 0.5']
    dirs = [
        os.path.join(dir_meta_curr, dir_curr)
        for dir_meta_curr in [dir_meta_r1, dir_meta_r3]
        for dir_curr in dirs_post
    ]
    legend_entries = [
        meta_legend + ' ' + sub_legend for meta_legend in meta_legend_entries
        for sub_legend in sub_legend_entries
    ]

    # # dirs = []
    # # dir_meta = '../experiments/showing_overfitting_justhflip_khorrami_capsule_7_31'
    # # dir_curr = os.path.join(dir_meta,'oulu_96_three_im_no_neutral_just_strong_False_0_reconstruct_False_True_all_aug_margin_False_wdecay_0_600_step_600_0.1_0.001_0.001')
    # # dirs.append(dir_curr)
    # # dir_meta = '../experiments/showing_overfitting_justhflip_khorrami_capsule_7_33'
    # # dir_r3 = 'oulu_96_three_im_no_neutral_just_strong_False_0_reconstruct_False_True_all_aug_margin_False_wdecay_0_600_step_600_0.1_0.001_0.001'
    # # dir_r3_lw_eq = 'oulu_96_three_im_no_neutral_just_strong_False_0_reconstruct_True_True_all_aug_margin_False_wdecay_0_600_step_600_0.1_0.001_0.001_0.001'
    # # dir_r3_lw_b = 'oulu_96_three_im_no_neutral_just_strong_False_0_reconstruct_True_True_all_aug_margin_False_wdecay_0_600_step_600_0.1_0.001_0.001_0.001_lossweights_1.0_100.0'

    # dirs = []
    # # dir_meta = '../experiments/showing_overfitting_justhflip_khorrami_capsule_7_31'
    # # dir_curr = os.path.join(dir_meta,'oulu_96_three_im_no_neutral_just_strong_False_9_reconstruct_False_True_all_aug_margin_False_wdecay_0_600_step_600_0.1_0.001_0.001')
    # # dirs.append(dir_curr)
    # dir_meta = '../experiments/showing_overfitting_justhflip_khorrami_capsule_7_33'
    # dir_r3 = 'oulu_96_three_im_no_neutral_just_strong_False_9_reconstruct_False_True_all_aug_margin_False_wdecay_0_600_step_600_0.1_0.001_0.001'
    # dir_r3_lw_eq = 'oulu_96_three_im_no_neutral_just_strong_False_9_reconstruct_True_True_all_aug_margin_False_wdecay_0_600_step_600_0.1_0.001_0.001_0.001_lossweights_1.0_1.0'
    # dir_r3_lw_b = 'oulu_96_three_im_no_neutral_just_strong_False_9_reconstruct_True_True_all_aug_margin_False_wdecay_0_600_step_600_0.1_0.001_0.001_0.001_lossweights_1.0_100.0'

    # dir_r3_do = '../experiments/showing_overfitting_justhflip_khorrami_capsule_7_3_with_dropout3/oulu_96_three_im_no_neutral_just_strong_False_9_reconstruct_False_True_all_aug_margin_False_wdecay_0_600_step_600_0.1_0.001_0.001_0.5'

    # dirs_to_pend = [dir_r3,dir_r3_lw_eq,dir_r3_lw_b]

    # for dir_curr in dirs_to_pend:
    # 	dir_curr = os.path.join(dir_meta, dir_curr)
    # 	dirs.append(dir_curr)

    # dirs.append(dir_r3_do)

    window = 10
    val_lim = 600
    epoch_range = range(window - 1, val_lim)

    dirs = dirs[:4] + dirs[-2:]
    legend_entries = legend_entries[:4] + legend_entries[-2:]

    out_file = os.path.join(out_dir, 'val_accuracy_9_do.png')
    xAndYs = []
    # legend_entries = ['R3+0','R3+1e-7','R3+1e-5','R3+DO']
    for dir_curr in dirs:
        log_file_curr = os.path.join(dir_curr, 'log.txt')
        val_losses = [
            line_curr for line_curr in util.readLinesFromFile(log_file_curr)
            if 'val accuracy' in line_curr
        ]
        val_losses = [
            float(line_curr.split(' ')[-1]) for line_curr in val_losses
        ]
        val_losses = val_losses[:val_lim]
        print dir_curr, len(val_losses)

        val_losses = np.convolve(val_losses,
                                 np.ones((window, )) / window,
                                 mode='valid')

        xAndYs.append((epoch_range, val_losses))
    visualize.plotSimple(xAndYs,
                         out_file=out_file,
                         xlabel='Epoch',
                         ylabel='Validation Accuracy',
                         legend_entries=legend_entries,
                         ylim=[0.6, 0.8],
                         outside=True)
Example #21
def train_model(out_dir_train,
                train_data,
                test_data,
                batch_size=None,
                batch_size_val=None,
                num_epochs=100,
                save_after=20,
                disp_after=1,
                plot_after=10,
                test_after=1,
                lr=0.0001,
                dec_after=100,
                model_name='alexnet',
                criterion=nn.CrossEntropyLoss(),
                gpu_id=0,
                num_workers=0,
                model_file=None,
                epoch_start=0):

    util.mkdir(out_dir_train)
    log_file = os.path.join(out_dir_train, 'log.txt')
    plot_file = os.path.join(out_dir_train, 'loss.jpg')
    log_arr = []
    plot_arr = [[], []]
    plot_val_arr = [[], []]

    network = models.get(model_name)
    # data_transforms = network.data_transforms
    if model_file is not None:
        network.model = torch.load(model_file)
    model = network.model

    # train_data = dataset(train_file,data_transforms['train'])
    # test_data = dataset(test_file,data_transforms['val'])
    
    if batch_size is None:
        batch_size = len(train_data)

    if batch_size_val is None:
        batch_size_val = len(test_data)

    train_dataloader = torch.utils.data.DataLoader(train_data,
                                                   batch_size=batch_size,
                                                   shuffle=True,
                                                   num_workers=num_workers)

    test_dataloader = torch.utils.data.DataLoader(test_data,
                                                  batch_size=batch_size_val,
                                                  shuffle=False,
                                                  num_workers=num_workers)
    
    torch.cuda.set_device(gpu_id)
    
    model = model.cuda()
    model.train(True)
    
    optimizer = optim.SGD(network.get_lr_list(lr), lr=0, momentum=0.9)

    if dec_after is not None:
        exp_lr_scheduler = lr_scheduler.StepLR(optimizer, step_size=dec_after, gamma=0.1)

    for num_epoch in range(epoch_start, num_epochs):

        for num_iter_train, batch in enumerate(train_dataloader):

            data = Variable(batch['image'].cuda())
            labels = Variable(torch.LongTensor(batch['label']).cuda())
            optimizer.zero_grad()
            loss = criterion(model(data), labels)
            loss_iter = loss.data[0]
            loss.backward()
            optimizer.step()

            num_iter = num_epoch * len(train_dataloader) + num_iter_train
            plot_arr[0].append(num_iter)
            plot_arr[1].append(loss_iter)

            str_display = 'lr: %.6f, iter: %d, loss: %.4f' % (
                optimizer.param_groups[-1]['lr'], num_iter, loss_iter)
            log_arr.append(str_display)
            print str_display

            if num_iter % plot_after == 0 and num_iter > 0:
                util.writeFile(log_file, log_arr)
                if len(plot_val_arr[0]) == 0:
                    visualize.plotSimple([(plot_arr[0], plot_arr[1])],
                                         out_file=plot_file,
                                         title='Loss',
                                         xlabel='Iteration',
                                         ylabel='Loss',
                                         legend_entries=['Train'])
                else:
                    visualize.plotSimple(
                        [(plot_arr[0], plot_arr[1]),
                         (plot_val_arr[0], plot_val_arr[1])],
                        out_file=plot_file,
                        title='Loss',
                        xlabel='Iteration',
                        ylabel='Loss',
                        legend_entries=['Train', 'Val'])


        if num_epoch % test_after == 0:
            model.eval()
            predictions = []
            labels_all = []

            for num_iter_test, batch in enumerate(test_dataloader):
                labels_all.append(batch['label'].numpy())

                data = Variable(batch['image'].cuda())
                labels = Variable(torch.LongTensor(batch['label']).cuda())
                output = model(data)

                out = output.data.cpu().numpy()
                predictions.append(np.argmax(out, 1))

                loss = criterion(output, labels)
                loss_iter = loss.data[0]

                num_iter = num_epoch * len(train_dataloader) + num_iter_test
                plot_val_arr[0].append(num_iter)
                plot_val_arr[1].append(loss_iter)

                str_display = 'lr: %.6f, val iter: %d, val loss: %.4f' % (
                    optimizer.param_groups[-1]['lr'], num_iter, loss_iter)
                log_arr.append(str_display)
                print str_display
            labels_all = np.concatenate(labels_all)
            predictions = np.concatenate(predictions)
            accuracy = np.sum(predictions == labels_all) / float(labels_all.size)
            str_display = 'val accuracy: %.4f' % accuracy
            log_arr.append(str_display)
            print str_display
            

            model.train(True)

        if num_epoch % save_after == 0:
            out_file = os.path.join(out_dir_train,
                                    'model_' + str(num_epoch) + '.pt')
            print 'saving', out_file
            torch.save(model, out_file)

        if dec_after is not None:
            exp_lr_scheduler.step()

    out_file = os.path.join(out_dir_train, 'model_' + str(num_epoch) + '.pt')
    print 'saving', out_file
    torch.save(model, out_file)
    
    # print plot_arr[0]

    util.writeFile(log_file, log_arr)
    if len(plot_val_arr[0]) == 0:
        visualize.plotSimple([(plot_arr[0], plot_arr[1])],
                             out_file=plot_file,
                             title='Loss',
                             xlabel='Iteration',
                             ylabel='Loss',
                             legend_entries=['Train'])
    else:
        visualize.plotSimple([(plot_arr[0], plot_arr[1]),
                              (plot_val_arr[0], plot_val_arr[1])],
                             out_file=plot_file,
                             title='Loss',
                             xlabel='Iteration',
                             ylabel='Loss',
                             legend_entries=['Train', 'Val'])
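
# train_model indexes every batch as batch['image'] / batch['label'], so the
# train_data and test_data objects must yield dicts with those keys. A
# minimal sketch of a compatible dataset (the class below is illustrative,
# not part of this codebase):
import torch.utils.data


class DictDatasetSketch(torch.utils.data.Dataset):
    def __init__(self, images, labels):
        # images: N x C x H x W float tensor; labels: length-N int sequence
        self.images = images
        self.labels = labels

    def __len__(self):
        return len(self.labels)

    def __getitem__(self, idx):
        return {'image': self.images[idx], 'label': self.labels[idx]}
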
Example #22
def viz_pascal():

    filename = '../contextlocnet-master/data/with_det_scores_test.h5'
    # out_dir = '../scratch/voc_2007_test_scores/det_viz'
    # out_dir = '../scratch/voc_2007_test_scores/det_viz_smallest'
    out_dir = '../scratch/voc_2007_test_scores/det_viz_biggest'
    det_dir = '../scratch/voc_2007_test_scores/output_softmax'

    util.mkdir(out_dir)

    f = h5py.File(filename, 'r')
    labels = f['labels']

    label_strs = [
        'aeroplane', 'bicycle', 'bird', 'boat', 'bottle', 'bus', 'car', 'cat',
        'chair', 'cow', 'diningtable', 'dog', 'horse', 'motorbike', 'person',
        'pottedplant', 'sheep', 'sofa', 'train', 'tvmonitor'
    ]
    legend_entries = [
        '608x800', '496x656', '400x544', '720x960', '864x1152', '608x800-h',
        '496x656-h', '400x544-h', '720x960-h', '864x1152-h'
    ]

    legend_entries_meta = [
        '608x800', '608x800-h', '496x656', '496x656-h', '400x544', '400x544-h',
        '720x960', '720x960-h', '864x1152', '864x1152-h'
    ]

    # '496x656-h',
    # '400x544-h',
    # '720x960-h',
    # '864x1152-h']
    scales_to_keep = [9]

    for idx_test, label in enumerate(labels):
        gt_classes = np.where(label > 0)[0]
        filename = str(idx_test + 1)
        det_file = os.path.join(det_dir, filename + '.npy')
        dets_curr = np.load(det_file)

        for gt_class in gt_classes:
            label_str = label_strs[gt_class]
            print label_str
            out_dir_curr = os.path.join(out_dir, label_str)
            util.mkdir(out_dir_curr)
            x = range(dets_curr.shape[2])

            xAndYs = [(x, dets_curr[scale_idx, gt_class, :])
                      for scale_idx in scales_to_keep]
            # range(0,dets_curr.shape[0],2)]
            out_file = os.path.join(out_dir_curr, filename.zfill(5) + '.jpg')

            print out_file

            legend_entries = [
                legend_entries_meta[idx] for idx in scales_to_keep
            ]
            visualize.plotSimple(xAndYs,
                                 out_file=out_file,
                                 title='Det Branch Output Multiscale',
                                 xlabel='ROIs',
                                 ylabel='Det Conf',
                                 legend_entries=legend_entries)

            # print np.sum(dets_curr[:,gt_class,:],axis=1)
            # print det_curr.shape

            # raw_input()
    for label_str in label_strs:
        out_dir_curr = os.path.join(out_dir, label_str)
        visualize.writeHTMLForFolder(out_dir_curr)
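
# f['labels'] above is an h5py dataset that viz_pascal iterates lazily, one
# row per test image. A tiny self-contained sketch of that access pattern
# (the toy file written here is illustrative, not project data):
import h5py
import numpy as np

with h5py.File('toy_labels.h5', 'w') as f_toy:
    f_toy.create_dataset('labels', data=np.eye(3))

f_toy = h5py.File('toy_labels.h5', 'r')
for idx_test, label in enumerate(f_toy['labels']):
    # same np.where pattern as viz_pascal uses to find present classes
    print idx_test, np.where(label > 0)[0]
f_toy.close()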