Code Example #1
File: measure5.py Project: aatapa/RLScore
def train_rls():
    #Trains RLS with automatically selected regularization parameter
    X_train, Y_train, X_test, Y_test = load_housing()
    regparams = [2.**i for i in range(-15, 16)]
    learner = LeaveOneOutRLS(X_train, Y_train, regparams=regparams, measure=cindex)
    loo_performances = learner.cv_performances
    P_test = learner.predict(X_test)
    print("leave-one-out cindex " + str(loo_performances))
    print("chosen regparam %f" %learner.regparam)
    print("test cindex %f" %cindex(Y_test, P_test))
Code Example #2
File: regression3.py Project: disc5/RLScore
def train_rls():
    #Trains RLS with automatically selected regularization parameter
    X_train, Y_train, X_test, Y_test = load_housing()
    regparams = [2.**i for i in range(-15, 16)]
    learner = LeaveOneOutRLS(X_train, Y_train, regparams=regparams)
    loo_errors = learner.cv_performances
    P_test = learner.predict(X_test)
    print("leave-one-out errors " + str(loo_errors))
    print("chosen regparam %f" %learner.regparam)
    print("test error %f" %sqerror(Y_test, P_test))
Code Example #3
File: classification6.py Project: aatapa/RLScore
def train_rls():
    X_train, Y_train, X_test, Y_test = load_wine()
    #Map labels from set {1,2,3} to one-vs-all encoding
    Y_train = to_one_vs_all(Y_train, False)
    Y_test = to_one_vs_all(Y_test, False)
    regparams = [2.**i for i in range(-15, 16)]
    learner = LeaveOneOutRLS(X_train, Y_train, regparams=regparams, measure=ova_accuracy)
    P_test = learner.predict(X_test)
    #ova_accuracy computes one-vs-all classification accuracy directly between the transformed
    #class label matrix and a matrix of predictions, where each column corresponds to a class
    print("test set accuracy %f" %ova_accuracy(Y_test, P_test))
Code Example #4
def train_rls():
    X_train, Y_train, X_test, Y_test = load_wine()
    #Map labels from set {1,2,3} to one-vs-all encoding
    Y_train = to_one_vs_all(Y_train)
    Y_test = to_one_vs_all(Y_test)
    regparams = [2.**i for i in range(-15, 16)]
    learner = LeaveOneOutRLS(X_train, Y_train, regparams=regparams, measure=ova_accuracy)
    P_test = learner.predict(X_test)
    #ova_accuracy computes one-vs-all classification accuracy directly between the transformed
    #class label matrix and a matrix of predictions, where each column corresponds to a class
    print("test set accuracy %f" %ova_accuracy(Y_test, P_test))
Code Example #5
def train_rls():
    X_train, Y_train, foo = read_svmlight("a1a.t")
    X_test, Y_test, foo = read_svmlight("a1a")
    #Randomly select 500 training examples to use as basis vectors
    indices = range(X_train.shape[0])
    indices = random.sample(indices, 500)
    basis_vectors = X_train[indices]
    regparams = [2.**i for i in range(-15, 16)]
    gammas = regparams
    best_regparam = None
    best_gamma = None
    best_acc = 0.
    best_learner = None
    for gamma in gammas:
        #New RLS is initialized for each kernel parameter
        learner = LeaveOneOutRLS(X_train,
                                 Y_train,
                                 basis_vectors=basis_vectors,
                                 kernel="GaussianKernel",
                                 gamma=gamma,
                                 regparams=regparams,
                                 measure=accuracy)
        acc = np.max(learner.cv_performances)
        if acc > best_acc:
            best_acc = acc
            best_regparam = learner.regparam
            best_gamma = gamma
            best_learner = learner
    P_test = best_learner.predict(X_test)
    print("best parameters gamma %f regparam %f" % (best_gamma, best_regparam))
    print("best leave-one-out accuracy %f" % best_acc)
    print("test set accuracy %f" % accuracy(Y_test, P_test))
Code Example #6
File: regression5.py Project: disc5/RLScore
def train_rls():
    #Selects both the Gaussian kernel gamma parameter and the regularization parameter with LOOCV
    X_train, Y_train, X_test, Y_test = load_housing()
    regparams = [2.**i for i in range(-15, 16)]
    gammas = regparams
    best_regparam = None
    best_gamma = None
    best_error = float("inf")
    best_learner = None
    for gamma in gammas:
        #New RLS is initialized for each kernel parameter
        learner = LeaveOneOutRLS(X_train,
                                 Y_train,
                                 kernel="GaussianKernel",
                                 gamma=gamma,
                                 regparams=regparams)
        e = np.min(learner.cv_performances)
        if e < best_error:
            best_error = e
            best_regparam = learner.regparam
            best_gamma = gamma
            best_learner = learner
    P_test = best_learner.predict(X_test)
    print("best parameters gamma %f regparam %f" % (best_gamma, best_regparam))
    print("best leave-one-out error %f" % best_error)
    print("test error %f" % sqerror(Y_test, P_test))
Code Example #7
    def fit(self,
            X_src,
            y_src,
            X_tgt_known,
            y_tgt_known,
            X_tgt_unknown,
            y_tgt_unknown,
            verbose=False):
        # Map labels from set {1,2,3} to one-vs-all encoding

        # Labels are treated as zero-based only if at least one label is 0
        zerolabels = np.count_nonzero(y_src) < len(y_src)

        # Choose the measure from the raw labels, before the one-vs-all
        # encoding collapses them to a fixed set of values
        if len(np.unique(y_src)) > 2:
            self.measure = ova_accuracy
        else:
            self.measure = accuracy

        y_src = to_one_vs_all(y_src, zerolabels)

        regparams = [2.**i for i in range(-15, 16)]

        self.learner = LeaveOneOutRLS(X_src,
                                      y_src,
                                      regparams=regparams,
                                      measure=self.measure)
        p_tgt = self.learner.predict(X_tgt_known)
        # ova_accuracy computes one-vs-all classification accuracy directly between the transformed
        # class label matrix and a matrix of predictions, where each column corresponds to a class
        self.learner = RLS(X_src, y_src)
        best_regparam = None
        best_accuracy = 0.
        # exponential grid of possible regparam values
        log_regparams = range(-15, 16)
        for log_regparam in log_regparams:
            regparam = 2.**log_regparam
            # RLS is re-trained with the new regparam; this is
            # very fast thanks to a computational shortcut
            self.learner.solve(regparam)
            # Leave-one-out cross-validation predictions; also fast thanks to
            # a computational shortcut
            P_loo = self.learner.leave_one_out()
            acc = self.measure(y_src, P_loo)
            if verbose:
                print("LooRLS regparam 2**%d, loo-accuracy %f" %
                      (log_regparam, acc))
            if acc > best_accuracy:
                best_accuracy = acc
                best_regparam = regparam
        self.learner.solve(best_regparam)
        if verbose:
            print("LooRLS best regparam %f with loo-accuracy %f" %
                  (best_regparam, best_accuracy))
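The computational shortcut mentioned in the comments is the standard closed-form leave-one-out solution for regularized least-squares: once the hat matrix H = X (X^T X + regparam*I)^{-1} X^T is available, every leave-one-out prediction follows from the in-sample predictions and the diagonal of H, so no model is ever re-trained. The numpy sketch below illustrates that identity for the linear case with a 1-D label vector; it is independent of RLScore's actual implementation, which also handles the kernelized case.

import numpy as np

def loo_predictions_linear_rls(X, y, regparam):
    #Closed-form leave-one-out predictions for linear RLS / ridge regression
    n, d = X.shape
    G = np.linalg.solve(X.T @ X + regparam * np.eye(d), X.T)  #(X^T X + regparam*I)^{-1} X^T
    H = X @ G            #hat matrix
    yhat = H @ y         #in-sample predictions
    h = np.diag(H)       #leverage values H_ii
    #Standard identity: loo_i = (yhat_i - h_i * y_i) / (1 - h_i)
    return (yhat - h * y) / (1.0 - h)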