Example #1
0
def train_rls():
    """Select the RLS regularization parameter by leave-one-out CV.

    Fits RLS on the "a1a.t" data, scans regparam over 2**-15 .. 2**15
    using the fast leave-one-out shortcut, and reports the accuracy of
    the best model on the "a1a" test set.
    """
    X_train, Y_train, _ = read_svmlight("a1a.t")
    X_test, Y_test, _ = read_svmlight("a1a", X_train.shape[1])
    learner = RLS(X_train, Y_train)
    best_regparam, best_accuracy = None, 0.
    for exponent in range(-15, 16):
        candidate = 2. ** exponent
        # Re-solving for a new regparam reuses cached factorizations,
        # so this re-training is very cheap.
        learner.solve(candidate)
        # Leave-one-out predictions also come from a computational shortcut.
        loo_predictions = learner.leave_one_out()
        loo_acc = accuracy(Y_train, loo_predictions)
        print("regparam 2**%d, loo-accuracy %f" % (exponent, loo_acc))
        if loo_acc > best_accuracy:
            best_accuracy, best_regparam = loo_acc, candidate
    learner.solve(best_regparam)
    P_test = learner.predict(X_test)
    print("best regparam %f with loo-accuracy %f" % (best_regparam, best_accuracy))
    print("test set accuracy %f" % accuracy(Y_test, P_test))
Example #2
0
def train_rls():
    """Tune the RLS regularizer with leave-one-out CV, then evaluate.

    Reads training data from "a1a.t" and test data from "a1a",
    evaluates every regparam 2**k for k in [-15, 15] via the fast
    leave-one-out shortcut, and prints the test accuracy achieved by
    the best-scoring model.
    """
    X_train, Y_train, unused = read_svmlight("a1a.t")
    X_test, Y_test, unused = read_svmlight("a1a", X_train.shape[1])
    learner = RLS(X_train, Y_train)
    best_regparam = None
    best_accuracy = 0.
    for k in range(-15, 16):
        rp = 2. ** k
        # Cheap re-training: RLS reuses the expensive decomposition
        # when only the regparam changes.
        learner.solve(rp)
        # Fast leave-one-out predictions via the analytic shortcut.
        acc = accuracy(Y_train, learner.leave_one_out())
        print("regparam 2**%d, loo-accuracy %f" % (k, acc))
        if acc > best_accuracy:
            best_regparam = rp
            best_accuracy = acc
    learner.solve(best_regparam)
    P_test = learner.predict(X_test)
    print("best regparam %f with loo-accuracy %f" % (best_regparam, best_accuracy))
    print("test set accuracy %f" % accuracy(Y_test, P_test))
Example #3
0
def train_rls():
    """Kernel-parameter and regparam search for sparse (basis-vector) RLS.

    Approximates Gaussian-kernel RLS with 500 randomly chosen basis
    vectors, scans gamma and regparam over the same exponential grid
    via LeaveOneOutRLS, and reports test-set accuracy of the best model.
    """
    X_train, Y_train, _ = read_svmlight("a1a.t")
    X_test, Y_test, _ = read_svmlight("a1a")
    # Draw 500 random training rows to act as basis vectors.
    basis_vectors = X_train[random.sample(range(X_train.shape[0]), 500)]
    regparams = [2. ** exp for exp in range(-15, 16)]
    gammas = regparams
    best_regparam = None
    best_gamma = None
    best_acc = 0.
    best_learner = None
    for gamma in gammas:
        # A fresh learner is needed per kernel parameter; all regparams
        # are evaluated internally by leave-one-out.
        learner = LeaveOneOutRLS(X_train,
                                 Y_train,
                                 basis_vectors=basis_vectors,
                                 kernel="GaussianKernel",
                                 gamma=gamma,
                                 regparams=regparams,
                                 measure=accuracy)
        gamma_best = np.max(learner.cv_performances)
        if gamma_best > best_acc:
            best_acc = gamma_best
            best_regparam = learner.regparam
            best_gamma = gamma
            best_learner = learner
    P_test = best_learner.predict(X_test)
    print("best parameters gamma %f regparam %f" % (best_gamma, best_regparam))
    print("best leave-one-out accuracy %f" % best_acc)
    print("test set accuracy %f" % accuracy(Y_test, P_test))
Example #4
0
def train_rls():
    """Grid search over Gaussian-kernel width and regparam for sparse RLS.

    Uses 100 random training examples as basis vectors, a gamma grid of
    2**-10 .. 2**0 and a regparam grid of 2**-15 .. 2**15, selecting by
    leave-one-out accuracy and reporting test-set accuracy.
    """
    X_train, Y_train, _ = read_svmlight("a1a.t")
    X_test, Y_test, _ = read_svmlight("a1a")
    # A random subset of 100 rows serves as the basis-vector set.
    sample_rows = random.sample(range(X_train.shape[0]), 100)
    basis_vectors = X_train[sample_rows]
    regparams = [2.0 ** e for e in range(-15, 16)]
    gammas = [2 ** e for e in range(-10, 1)]
    # Track the incumbent in one place instead of four parallel locals.
    best = {"acc": 0.0, "gamma": None, "regparam": None, "learner": None}
    for gamma in gammas:
        # Each kernel width needs its own learner; regparams are swept
        # internally with the fast leave-one-out shortcut.
        learner = LeaveOneOutRLS(
            X_train,
            Y_train,
            basis_vectors=basis_vectors,
            kernel="GaussianKernel",
            gamma=gamma,
            regparams=regparams,
            measure=accuracy,
        )
        acc = np.max(learner.cv_performances)
        print("gamma %f, regparam %f, accuracy %f" % (gamma, learner.regparam, acc))
        if acc > best["acc"]:
            best.update(acc=acc, gamma=gamma, regparam=learner.regparam, learner=learner)
    P_test = best["learner"].predict(X_test)
    print("best parameters gamma %f regparam %f" % (best["gamma"], best["regparam"]))
    print("best leave-one-out accuracy %f" % best["acc"])
    print("test set accuracy %f" % accuracy(Y_test, P_test))
Example #5
0
from rlscore.measure import accuracy

# True class labels: three negative examples followed by two positive ones.
Y = [-1, -1, -1, 1, 1]

# Binary predictions; one of them disagrees with Y.
P = [-1, -1, 1, 1, 1]

print("My accuracy %f" % accuracy(Y, P))

# accuracy() also takes real-valued scores: entries > 0 count as +1,
# everything else as -1.
P2 = [-2.7, -1.3, 0.2, 1.3, 1]

print("My accuracy with real-valued predictions %f" % accuracy(Y, P2))

# Labels outside the set {-1, 1} are rejected, so this call fails.
Y2 = [2, 1, 3, 4, 1]

accuracy(Y2, P)
Example #6
0
from rlscore.measure import accuracy

# Five ground-truth labels: the first three negative, the last two positive.
Y = [-1, -1, -1, 1, 1]

# Hard {-1, +1} predictions; exactly one disagrees with Y.
P = [-1, -1, 1, 1, 1]

acc = accuracy(Y, P)
print("My accuracy %f" % acc)

# Real-valued scores are accepted too: values above zero map to +1,
# the rest map to -1.
P2 = [-2.7, -1.3, 0.2, 1.3, 1]

real_acc = accuracy(Y, P2)
print("My accuracy with real-valued predictions %f" % real_acc)

# accuracy() only allows labels drawn from {-1, 1}; this final call
# therefore raises an error.
Y2 = [2, 1, 3, 4, 1]

accuracy(Y2, P)