Example #1
import numpy as np

from vsSummDevs.SumEvaluation import metrics
from vsSummDevs.datasets.SumMe import SumMeMultiViewFeatureLoader

# dataset_pathvars is assumed to be defined by the surrounding module and to
# expose the SumMe video file stems.
video_file_stem_names = dataset_pathvars.file_names
video_file_stem_names.sort()

# Running totals of the per-video min / max / mean F1 scores.
t_acc_min = 0.0
t_acc_max = 0.0
t_acc_mean = 0.0
for video_idx, s_filename in enumerate(video_file_stem_names):
    # load_by_name is assumed to return (features [N x D], per-user 0/1
    # labels [N x U], feature sizes); softmax normalization is disabled here.
    _, user_labels, feature_sizes = SumMeMultiViewFeatureLoader.load_by_name(
        s_filename, doSoftmax=False)

    # Leave-one-user-out human baseline: score each user's annotation
    # against the annotations of all remaining users.
    s_F1_scores = []
    for user_idx in range(user_labels.shape[1]):
        selected_labels = user_labels[:, user_idx]
        other_users = set(range(user_labels.shape[1])) - {user_idx}
        user_scores_list = [user_labels[:, i] for i in other_users]
        s_F1_score = metrics.averaged_F1_score(y_trues=user_scores_list,
                                               y_score=selected_labels.tolist())
        s_F1_scores.append(s_F1_score)

    print "[{:02d} | {:02d}]\t{:s}: \tMin:{:.04f}\tMax:{:.04f}, Mean:{:.04f}".format(video_idx, len(video_file_stem_names), s_filename, min(s_F1_scores), max(s_F1_scores), np.mean(np.asarray(s_F1_scores)))
    t_acc_min += min(s_F1_scores)
    t_acc_max += max(s_F1_scores)
    t_acc_mean += np.mean(np.asarray(s_F1_scores))
print "Total MinAcc: {:.04f}\t MaxAcc{:.04f}\t Mean:{:.04f}".format(t_acc_min/len(video_file_stem_names), t_acc_max/len(video_file_stem_names), t_acc_mean/len(video_file_stem_names))



Example #2
import numpy as np
from scipy.stats import pearsonr
from sklearn import svm

from vsSummDevs.SumEvaluation import metrics
from vsSummDevs.datasets.SumMe import SumMeMultiViewFeatureLoader

# rep_conversions is assumed to be importable from the vsSummDevs package;
# videofile_stems is assumed to hold the sorted SumMe video file stems, as
# in Example #1.
F1_scores = 0.0
for video_idx, s_filename in enumerate(videofile_stems):

    video_features, user_labels, _ = SumMeMultiViewFeatureLoader.load_by_name(
        s_filename)
    # Regression target: mean importance score across all user annotations.
    avg_labels = np.mean(user_labels, axis=1)

    # Fit a linear SVR on this video's own frames and predict per-frame
    # importance (an in-sample fit, not a train/test split).
    clf = svm.LinearSVR()
    clf.fit(video_features, avg_labels)

    frame_contrib = clf.predict(video_features)
    # Alternative (disabled): min-max normalize the scores and convert them
    # using predefined segments:
    # frame_contrib = (frame_contrib - np.min(frame_contrib)) / (np.max(frame_contrib) - np.min(frame_contrib))
    # s_seg = pdefined_segs[s_filename]
    # s_frame01scores = rep_conversions.framescore2frame01score(frame_contrib, s_seg)

    # Convert the real-valued frame scores into a binary keyframe selection.
    s_frame01scores = rep_conversions.framescore2frame01score_sort(
        frame_contrib)
    # Evaluate the binary selection against every user's annotation.
    user_scores_list = [user_labels[:, i] for i in range(user_labels.shape[1])]
    s_F1_score = metrics.averaged_F1_score(y_trues=user_scores_list,
                                           y_score=s_frame01scores.tolist())

    # Pearson correlation between predicted and average ground-truth scores.
    s_scorr, s_p = pearsonr(frame_contrib, avg_labels)
    print("[{:d} | {:d}]\t{:s}\t{:.04f}\tcorrelation: {:.04f}\t[D, N]: [{:d}, {:d}]".format(
        video_idx, len(videofile_stems), s_filename, s_F1_score, s_scorr,
        video_features.shape[1], video_features.shape[0]))
    F1_scores += s_F1_score

print "overall F1 score: {:.04f}".format(F1_scores / len(videofile_stems))

# print "DEBUG"