Example #1
import numpy as np
from rankit.build.rank_script.rlscore.learner.rls import LeavePairOutRLS
from rankit.build.rank_script.rlscore.utilities.reader import read_folds
from rankit.build.rank_script.rlscore.utilities.reader import read_sparse
from rankit.build.rank_script.rlscore.measure import auc

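# Load the classification labels, the cross-validation fold definitions
# and the sparse feature matrices for the training and test sets.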
train_labels = np.loadtxt("./legacy_tests/data/class_train.labels")
test_labels = np.loadtxt("./legacy_tests/data/class_test.labels")
folds = read_folds("./legacy_tests/data/folds.txt")
train_features = read_sparse("./legacy_tests/data/class_train.features")
test_features = read_sparse("./legacy_tests/data/class_test.features")
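
# Grid of regularization parameters from 2^-10 to 2^10, evaluated with
# AUC as the cross-validation performance measure.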
kwargs = {}
kwargs['measure'] = auc
kwargs['regparams'] = [2**i for i in range(-10, 11)]
kwargs["Y"] = train_labels
kwargs["X"] = train_features
kwargs["folds"] = folds
learner = LeavePairOutRLS(**kwargs)
grid = kwargs['regparams']
perfs = learner.cv_performances
for i in range(len(grid)):
    print("parameter %f cv_performance %f" % (grid[i], perfs[i]))
P = learner.predict(test_features)
test_perf = auc(test_labels, P)
print("test set performance: %f" % test_perf)
Example #2
import numpy as np
from rankit.build.rank_script.rlscore.learner.query_rankrls import LeaveQueryOutRankRLS
from rankit.build.rank_script.rlscore.utilities.reader import read_qids
from rankit.build.rank_script.rlscore.utilities.reader import read_sparse
from rankit.build.rank_script.rlscore.measure import cindex
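
# Load the ranking labels, the query ids and the sparse feature matrices
# for the training and test sets.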
train_labels = np.loadtxt("./legacy_tests/data/rank_train.labels")
test_labels = np.loadtxt("./legacy_tests/data/rank_test.labels")
train_qids = read_qids("./legacy_tests/data/rank_train.qids")
test_features = read_sparse("./legacy_tests/data/rank_test.features")
train_features = read_sparse("./legacy_tests/data/rank_train.features")
test_qids = read_qids("./legacy_tests/data/rank_test.qids")
kwargs = {}
kwargs['measure'] = cindex
kwargs['regparams'] = [2**i for i in range(-10, 11)]
kwargs["Y"] = train_labels
kwargs["X"] = train_features
kwargs["qids"] = train_qids
learner = LeaveQueryOutRankRLS(**kwargs)
grid = kwargs['regparams']
perfs = learner.cv_performances
for i in range(len(grid)):
    print("parameter %f cv_performance %f" %(grid[i], perfs[i]))
P = learner.predict(test_features)
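
# Evaluate performance separately for each test query; cindex raises
# UndefinedPerformance when all labels within a query are identical.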
from rankit.build.rank_script.rlscore.measure.measure_utilities import UndefinedPerformance
from rankit.build.rank_script.rlscore.measure.measure_utilities import qids_to_splits
test_qids = qids_to_splits(test_qids)
perfs = []
for query in test_qids:
    try:
        perf = cindex(test_labels[query], P[query])
        perfs.append(perf)
    except UndefinedPerformance:
        # cindex cannot be computed for queries whose labels are all identical
        pass
test_perf = np.mean(perfs)
print("test set performance: %f" % test_perf)