Example #1
0
from sys import stdout

def print_matrix(mat):
    # print each row tab-separated, formatting every entry with precision()
    for i in range(len(mat)):
        for j in range(len(mat[i])):
            if j + 1 != len(mat[i]):
                stdout.write(str(precision(mat[i][j])) + "\t")
            else:
                stdout.write(str(precision(mat[i][j])))
        stdout.write("\n")
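Here precision() is used as a scalar formatter applied to each matrix entry before printing. The original helper is not shown; a minimal sketch, assuming it simply rounds a value to a fixed number of decimals (the digits parameter is an assumption):

def precision(value, digits=4):
    # assumed behaviour: round each entry to a fixed number of decimal places
    return round(float(value), digits)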
Example #2
0
def f1_score(y_true, y_pred):
    # harmonic mean of precision and recall; precision() and recall() are defined elsewhere
    pr = precision(y_true, y_pred)
    re = recall(y_true, y_pred)
    if pr + re == 0:
        return 0.0
    return 2 * pr * re / (pr + re)
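f1_score assumes precision() and recall() helpers that each take (y_true, y_pred) and return a scalar. A minimal sketch for binary 0/1 labels; these helpers are assumptions, not part of the original example:

def precision(y_true, y_pred):
    # fraction of predicted positives that are true positives
    tp = sum(1 for t, p in zip(y_true, y_pred) if t == 1 and p == 1)
    fp = sum(1 for t, p in zip(y_true, y_pred) if t == 0 and p == 1)
    return tp / float(tp + fp) if tp + fp else 0.0

def recall(y_true, y_pred):
    # fraction of actual positives that were predicted positive
    tp = sum(1 for t, p in zip(y_true, y_pred) if t == 1 and p == 1)
    fn = sum(1 for t, p in zip(y_true, y_pred) if t == 1 and p == 0)
    return tp / float(tp + fn) if tp + fn else 0.0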
Example #3
0
		# precision & MAP & P@k: accumulate P@5, P@20 and AP over all valid queries
		sum_ap = 0
		sum_p5 = 0
		sum_p20 = 0
		for q in results:
			rele_set = set()
			if q in relevant:
				rele_set = relevant[q]
			else:
				# if there are no relevant documents for the current query, exclude it
				validQ_Count -= 1
				continue
			table_path = p_dir + '/Q' + q + ".txt"
			checkDup(table_path)
			precision_table, ap = precision(rele_set, results[q], table_path)
			sum_p5 += precision_table[4]
			sum_p20 += precision_table[19]
			sum_ap += ap
			fd_pre.write(q)
			for x in precision_table:
				fd_pre.write("\t" + str(x))
			fd_pre.write('\n')
		fd_pre.close()
		AP5 = sum_p5 / validQ_Count
		AP20 = sum_p20 / validQ_Count
		MAP = sum_ap / validQ_Count

		# recall & MRR
		sum_rr = 0
		r_dir = recall_table_dir + '/' + system
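The fragment above indexes precision_table[4] and precision_table[19] and sums ap per query, so the precision() it calls presumably returns a per-rank precision table (at least 20 entries) plus the query's average precision, optionally writing the table to table_path. A minimal sketch under those assumptions; the parameter names ranked_docs and cutoff and the truncated-AP convention are mine, not the original code:

def precision(rele_set, ranked_docs, table_path=None, cutoff=20):
    precision_table = []
    hits = 0
    ap = 0.0
    for rank in range(1, cutoff + 1):
        if rank <= len(ranked_docs) and ranked_docs[rank - 1] in rele_set:
            hits += 1
            ap += hits / float(rank)
        precision_table.append(hits / float(rank))
    # normalize by the number of relevant documents for this query
    ap = ap / len(rele_set) if rele_set else 0.0
    if table_path is not None:
        with open(table_path, 'w') as fd:
            fd.write('\t'.join(str(p) for p in precision_table) + '\n')
    return [precision_table, ap]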
Example #4
0
import os
import cPickle
import h5py

# pycaffe_batch_feat, load_label and precision are assumed to come from the project's own modules
def run():
    use_gpu = 1

    # top K returned images
    top_k = 1000
    feat_len = 48  # number of activation outputs on the hidden layer

    # Absolute path of this project
    root_dir = '/opt/work/caffe-cvprw15'

    # set result folder
    result_folder = os.path.join(root_dir, 'analysis')

    # models
    model_file = os.path.join(
        root_dir, 'examples/cvprw15-cifar10/KevinNet_CIFAR10_48.caffemodel')
    model_def_file = os.path.join(
        root_dir,
        'examples/cvprw15-cifar10/KevinNet_CIFAR10_48_deploy.prototxt')

    # train-test
    test_file_list = os.path.join(
        root_dir, 'examples/cvprw15-cifar10/dataset/test-file-list.txt')
    test_label_file = os.path.join(
        root_dir, 'examples/cvprw15-cifar10/dataset/test-label.txt')
    train_file_list = os.path.join(
        root_dir, 'examples/cvprw15-cifar10/dataset/train-file-list.txt')
    train_label_file = os.path.join(
        root_dir, 'examples/cvprw15-cifar10/dataset/train-label.txt')

    # outputs
    #feat_test_file = '{:s}/feat-test.mat'.format(result_folder)
    #feat_train_file = '{:s}/feat-train.mat'.format(result_folder)
    binary_test_file = '{:s}/binary-test.mat'.format(result_folder)
    binary_train_file = '{:s}/binary-train.mat'.format(result_folder)

    py_feat_test_file = '{:s}/feat-test.pkl'.format(result_folder)
    py_feat_train_file = '{:s}/feat-train.pkl'.format(result_folder)
    py_binary_test_file = '{:s}/binary-test.pkl'.format(result_folder)
    py_binary_train_file = '{:s}/binary-train.pkl'.format(result_folder)

    USE_MAT_FEAT = False  # whether to load feature files generated and saved by the MATLAB code

    # feature extraction - test set
    if USE_MAT_FEAT and os.path.exists(binary_test_file):  # load .mat file
        data = h5py.File(binary_test_file, 'r')
        binary_test = data['binary_test'][:]  # 10000 x 48
        #binary_test = binary_test.T  # transpose => 48 x 10000
    elif os.path.exists(py_binary_test_file):
        fid = open(py_binary_test_file, 'rb')
        binary_test = cPickle.load(fid)
        fid.close()
    else:
        feat_test = pycaffe_batch_feat(test_file_list, use_gpu, feat_len,
                                       model_def_file, model_file, root_dir)
        binary_test = (feat_test > 0.5).astype(int)
        # save extracted features to disk
        with open(py_feat_test_file, 'wb') as fid:
            cPickle.dump(feat_test, fid)
        with open(py_binary_test_file, 'wb') as fid:
            cPickle.dump(binary_test, fid)

    # feature extraction - train set
    if USE_MAT_FEAT and os.path.exists(binary_train_file):  # load .mat file
        data = h5py.File(binary_train_file, 'r')
        binary_train = data['binary_train'][:]  # 50000 x 48
        #binary_train = binary_train.T  # transpose => 48 x 50000
    elif os.path.exists(py_binary_train_file):
        fid = open(py_binary_train_file, 'rb')
        binary_train = cPickle.load(fid)
        fid.close()
    else:
        feat_train = pycaffe_batch_feat(train_file_list, use_gpu, feat_len,
                                        model_def_file, model_file, root_dir)
        binary_train = (feat_train > 0.5).astype(int)
        # save extracted features to disk
        with open(py_feat_train_file, 'wb') as fid:
            cPickle.dump(feat_train, fid)
        with open(py_binary_train_file, 'wb') as fid:
            cPickle.dump(binary_train, fid)

    # load training and test image labels (double type)
    trn_label = load_label(train_label_file)
    tst_label = load_label(test_label_file)

    mAP, precision_at_k = precision(trn_label, binary_train, tst_label,
                                    binary_test, top_k, 1)

    print('mAP = {:f}'.format(mAP))
    saved_result = {'mAP': mAP, 'precision_at_k': precision_at_k}

    save_file = os.path.join(root_dir, 'saved_result.pkl')
    fid = open(save_file, 'wb')
    cPickle.dump(saved_result, fid)
    fid.close()
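Example #4 above (and Example #5 below) unpacks (mAP, precision_at_k) from precision(trn_label, binary_train, tst_label, binary_test, top_k, 1). A minimal NumPy sketch of such an evaluator, assuming binary hash codes ranked by Hamming distance, at least top_k training images, and a truncated-AP convention; the project's own implementation may differ:

import numpy as np

def precision(trn_label, binary_train, tst_label, binary_test, top_k, mode=1):
    # mode mirrors the trailing 1 in the call above and is ignored in this sketch
    trn_label = np.asarray(trn_label).ravel()
    tst_label = np.asarray(tst_label).ravel()
    binary_train = np.asarray(binary_train)
    binary_test = np.asarray(binary_test)

    n_queries = binary_test.shape[0]
    ranks = np.arange(1, top_k + 1)
    precision_at_k = np.zeros(top_k)
    ap_sum = 0.0

    for i in range(n_queries):
        # Hamming distance from this query code to every training code
        dist = np.count_nonzero(binary_train != binary_test[i], axis=1)
        order = np.argsort(dist, kind='stable')[:top_k]
        relevant = (trn_label[order] == tst_label[i]).astype(float)

        cum_hits = np.cumsum(relevant)
        precision_at_k += cum_hits / ranks
        if relevant.sum() > 0:
            # truncated average precision over the top_k returned images
            ap_sum += np.sum((cum_hits / ranks) * relevant) / relevant.sum()

    return ap_sum / n_queries, precision_at_k / n_queries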
Example #5
0
import os
import pickle
import scipy.io as scio

# pycaffe_batch_feat, load_label and precision are assumed to come from the project's own modules
def run():
    use_gpu = 1

    # top K returned images
    top_k = 1000
    feat_len = 48  # number of activation outputs on the hidden layer

    # Absolute path of this project
    root_dir = 'E:/GitHub/cvprw15-win/'
    root_dataset = 'E:/GitHub/dataset/cifar10/'

    # set result folder
    result_folder = os.path.join(root_dir, 'analysis/cvprw15-cifar10')

    # models
    model_file = os.path.join(
        root_dir, 'examples/cvprw15-cifar10/KevinNet_CIFAR10_48.caffemodel')
    model_def_file = os.path.join(
        root_dir,
        'examples/cvprw15-cifar10/KevinNet_CIFAR10_48_deploy.prototxt')

    # train-test
    test_file_list = os.path.join(
        root_dataset, 'examples/cvprw15-cifar10/dataset/test-file-list.txt')
    test_label_file = os.path.join(
        root_dataset, 'examples/cvprw15-cifar10/dataset/test-label.txt')
    train_file_list = os.path.join(
        root_dataset, 'examples/cvprw15-cifar10/dataset/train-file-list.txt')
    train_label_file = os.path.join(
        root_dataset, 'examples/cvprw15-cifar10/dataset/train-label.txt')

    # outputs
    feat_test_file = '{:s}/feat-test.mat'.format(result_folder)
    feat_train_file = '{:s}/feat-train.mat'.format(result_folder)
    binary_test_file = '{:s}/binary-test.mat'.format(result_folder)
    binary_train_file = '{:s}/binary-train.mat'.format(result_folder)

    # feature extraction - test set
    if os.path.exists(binary_test_file):  # load .mat file
        binary_test = scio.loadmat(binary_test_file)['binary_test'].astype(
            float)
        binary_test = binary_test.T
    else:
        feat_test, _ = pycaffe_batch_feat(test_file_list, use_gpu, feat_len,
                                          model_def_file, model_file, root_dir)
        scio.savemat(feat_test_file, {'feat_test': feat_test.T})
        binary_test = (feat_test > 0.5).astype(int)
        scio.savemat(binary_test_file, {'binary_test': binary_test.T})

    # feature extraction - train set
    if os.path.exists(binary_train_file):  # load .mat file
        binary_train = scio.loadmat(binary_train_file)['binary_train'].astype(
            float)
        binary_train = binary_train.T
    else:
        feat_train, _ = pycaffe_batch_feat(train_file_list, use_gpu, feat_len,
                                           model_def_file, model_file,
                                           root_dir)
        scio.savemat(feat_train_file, {'feat_train': feat_train.T})
        binary_train = (feat_train > 0.5).astype(int)
        scio.savemat(binary_train_file, {'binary_train': binary_train.T})

    # load training and test image labels (double type)
    trn_label = load_label(train_label_file)
    tst_label = load_label(test_label_file)

    mAP, precision_at_k = precision(trn_label, binary_train, tst_label,
                                    binary_test, top_k, 1)

    print('mAP = {:f}'.format(mAP))
    saved_result = {'mAP': mAP, 'precision_at_k': precision_at_k}

    save_file = os.path.join(result_folder, 'saved_result.pkl')
    fid = open(save_file, 'wb')
    pickle.dump(saved_result, fid)
    fid.close()
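Reading the saved evaluation back is straightforward; the path below is just the one assembled in Example #5 above:

import os
import pickle

result_folder = 'E:/GitHub/cvprw15-win/analysis/cvprw15-cifar10'
with open(os.path.join(result_folder, 'saved_result.pkl'), 'rb') as fid:
    saved_result = pickle.load(fid)
print('mAP = {:f}'.format(saved_result['mAP']))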