Example #1
    def test_kron_rls(self):

        regparam = 0.001

        K_train1, K_train2, Y_train, K_test1, K_test2, Y_test, X_train1, X_train2, X_test1, X_test2 = self.generate_xortask()
        rows, columns = Y_train.shape
        #print K_train1.shape, K_train2.shape, K_test1.shape, K_test2.shape, rows, columns
        trainlabelcount = rows * columns
        indmatrix = np.mat(range(trainlabelcount)).T.reshape(rows, columns)

        #Train linear Kronecker RLS with data-matrices
        params = {}
        params["regparam"] = regparam
        params["xmatrix1"] = X_train1
        params["xmatrix2"] = X_train2
        params["train_labels"] = Y_train
        linear_kron_learner = KronRLS.createLearner(**params)
        linear_kron_learner.train()
        linear_kron_model = linear_kron_learner.getModel()
        linear_kron_testpred = linear_kron_model.predictWithDataMatrices(
            X_test1, X_test2)

        #Train kernel Kronecker RLS with pre-computed kernel matrices
        params = {}
        params["regparam"] = regparam
        params["kmatrix1"] = K_train1
        params["kmatrix2"] = K_train2
        params["train_labels"] = Y_train
        kernel_kron_learner = KronRLS.createLearner(**params)
        kernel_kron_learner.train()
        kernel_kron_model = kernel_kron_learner.getModel()
        kernel_kron_testpred = kernel_kron_model.predictWithKernelMatrices(
            K_test1, K_test2)

        #Train an ordinary RLS regressor for reference
        K_Kron_train_x = np.kron(K_train1, K_train2)
        params = {}
        params["kernel_matrix"] = K_Kron_train_x
        params["train_labels"] = Y_train.reshape(trainlabelcount, 1)
        ordrls_learner = RLS.createLearner(**params)
        ordrls_learner.solve(regparam)
        ordrls_model = ordrls_learner.getModel()
        K_Kron_test_x = np.kron(K_test1, K_test2)
        ordrls_testpred = ordrls_model.predict(K_Kron_test_x)
        ordrls_testpred = ordrls_testpred.reshape(Y_test.shape[0],
                                                  Y_test.shape[1])

        print
        print type(linear_kron_testpred), type(kernel_kron_testpred), type(ordrls_testpred)
        print linear_kron_testpred[0, 0], kernel_kron_testpred[0, 0], ordrls_testpred[0, 0]
        print linear_kron_testpred[0, 1], kernel_kron_testpred[0, 1], ordrls_testpred[0, 1]
        print linear_kron_testpred[1, 0], kernel_kron_testpred[1, 0], ordrls_testpred[1, 0]

        print np.mean(np.abs(linear_kron_testpred - ordrls_testpred)), np.mean(np.abs(kernel_kron_testpred - ordrls_testpred))
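The test above checks that Kronecker RLS trained on the two small kernel matrices agrees with an ordinary RLS trained on the explicit np.kron product. What makes that agreement cheap to exploit is the standard Kronecker "vec trick": the large regularized system can be solved from eigendecompositions of the two factors alone. The sketch below is my own toy illustration of that identity, not RLScore code.

# Vec-trick sketch (own toy data and notation, not RLScore internals):
# (K1 (x) K2 + lam*I)^-1 vec(Y) computed from eigendecompositions of the two
# small factors, without materializing the Kronecker product.
import numpy as np

rng = np.random.RandomState(0)
n1, n2, lam = 5, 4, 0.001
X1, X2 = rng.randn(n1, 3), rng.randn(n2, 3)
K1, K2 = X1.dot(X1.T), X2.dot(X2.T)       # two small PSD kernel matrices
Y = rng.randn(n2, n1)                     # labels arranged as an n2 x n1 grid

# Reference: direct solve against the explicit Kronecker product kernel
big = np.kron(K1, K2) + lam * np.eye(n1 * n2)
a_direct = np.linalg.solve(big, Y.reshape(-1, order='F'))

# Shortcut: eigendecompose the factors and rescale elementwise in the eigenbasis
d1, V1 = np.linalg.eigh(K1)
d2, V2 = np.linalg.eigh(K2)
A = V2.dot((V2.T.dot(Y).dot(V1)) / (np.outer(d2, d1) + lam)).dot(V1.T)
print(np.allclose(a_direct, A.reshape(-1, order='F')))    # True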
Example #2
 def testModel(self):
     
     train_labels = np.random.random((10))
     test_labels = np.random.random((10))
     train_features = np.random.random((10,100))
     test_features = np.random.random((10,100))
     kwargs = {}
     kwargs["train_labels"] = train_labels
     kwargs["train_features"] = train_features
     kwargs["regparam"] = 1
     learner = RLS.createLearner(**kwargs)
     learner.train()
     model = learner.getModel()
     print
     #print 'Ten data points, single label '
     model = mod.LinearModel(np.random.random((100)))
     self.all_pred_cases(model)
     
     model = mod.LinearModel(np.random.random((100, 2)))
     self.all_pred_cases(model)
     
     #model = mod.LinearModel(np.random.random((1, 2)))
     #self.all_pred_cases(model)
     
     kwargs["kernel"] = "GaussianKernel"
     train_labels = np.random.random((10))
     kwargs["train_labels"] = train_labels
     learner = RLS.createLearner(**kwargs)
     learner.train()
     model = learner.getModel()
     self.all_pred_cases(model)
     
     kwargs["kernel"] = "GaussianKernel"
     train_labels = np.random.random((10,2))
     kwargs["train_labels"] = train_labels
     learner = RLS.createLearner(**kwargs)
     learner.train()
     model = learner.getModel()
     self.all_pred_cases(model)
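This test mainly exercises prediction output shapes for single- and multi-output models. For the linear case the shape logic is just that of a matrix product, as the short check below illustrates with plain numpy standing in for mod.LinearModel.

# Shape check (plain numpy stand-in, not mod.LinearModel itself): a coefficient
# vector yields one prediction per row, a coefficient matrix one column per output.
import numpy as np

rng = np.random.RandomState(0)
X = rng.random_sample((10, 100))        # 10 points, 100 features
w_single = rng.random_sample(100)       # single-output weights
W_multi = rng.random_sample((100, 2))   # two-output weights
print(X.dot(w_single).shape)            # (10,)
print(X.dot(W_multi).shape)             # (10, 2)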
Example #3
 def test_kron_rls(self):
     
     regparam = 0.001
     
     K_train1, K_train2, Y_train, K_test1, K_test2, Y_test, X_train1, X_train2, X_test1, X_test2 = self.generate_xortask()
     rows, columns = Y_train.shape
     #print K_train1.shape, K_train2.shape, K_test1.shape, K_test2.shape, rows, columns
     trainlabelcount = rows * columns
     indmatrix = np.mat(range(trainlabelcount)).T.reshape(rows, columns)
     
     #Train linear Kronecker RLS with data-matrices
     params = {}
     params["regparam"] = regparam
     params["xmatrix1"] = X_train1
     params["xmatrix2"] = X_train2
     params["train_labels"] = Y_train
     linear_kron_learner = KronRLS.createLearner(**params)
     linear_kron_learner.train()
     linear_kron_model = linear_kron_learner.getModel()
     linear_kron_testpred = linear_kron_model.predictWithDataMatrices(X_test1, X_test2)
     
     #Train kernel Kronecker RLS with pre-computed kernel matrices
     params = {}
     params["regparam"] = regparam
     params["kmatrix1"] = K_train1
     params["kmatrix2"] = K_train2
     params["train_labels"] = Y_train
     kernel_kron_learner = KronRLS.createLearner(**params)
     kernel_kron_learner.train()
     kernel_kron_model = kernel_kron_learner.getModel()
     kernel_kron_testpred = kernel_kron_model.predictWithKernelMatrices(K_test1, K_test2)
     
     #Train an ordinary RLS regressor for reference
     K_Kron_train_x = np.kron(K_train1, K_train2)
     params = {}
     params["kernel_matrix"] = K_Kron_train_x
     params["train_labels"] = Y_train.reshape(trainlabelcount, 1)
     ordrls_learner = RLS.createLearner(**params)
     ordrls_learner.solve(regparam)
     ordrls_model = ordrls_learner.getModel()
     K_Kron_test_x = np.kron(K_test1, K_test2)
     ordrls_testpred = ordrls_model.predict(K_Kron_test_x)
     ordrls_testpred = ordrls_testpred.reshape(Y_test.shape[0], Y_test.shape[1])
     
     print
     print type(linear_kron_testpred), type(kernel_kron_testpred), type(ordrls_testpred)
     print linear_kron_testpred[0, 0], kernel_kron_testpred[0, 0], ordrls_testpred[0, 0]
     print linear_kron_testpred[0, 1], kernel_kron_testpred[0, 1], ordrls_testpred[0, 1]
     print linear_kron_testpred[1, 0], kernel_kron_testpred[1, 0], ordrls_testpred[1, 0]
     
     print np.mean(np.abs(linear_kron_testpred - ordrls_testpred)), np.mean(np.abs(kernel_kron_testpred - ordrls_testpred))
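This is the same test as Example #1. The property it leans on is that with linear kernels (K = X X^T) the kernel route and the data-matrix route are two parametrizations of one model, so predictWithKernelMatrices and predictWithDataMatrices should agree. A quick check of that equivalence on toy data, in my own notation rather than KronRLS internals:

# With K = X X^T, kernel predictions K_test1 A K_test2^T equal linear
# predictions X_test1 W X_test2^T for the primal weights W = X_train1^T A X_train2.
import numpy as np

rng = np.random.RandomState(0)
X1, X2 = rng.randn(5, 3), rng.randn(4, 3)       # training data matrices
Xt1, Xt2 = rng.randn(6, 3), rng.randn(7, 3)     # test data matrices
A = rng.randn(5, 4)                             # arbitrary dual coefficients
K_test1, K_test2 = Xt1.dot(X1.T), Xt2.dot(X2.T)
W = X1.T.dot(A).dot(X2)                         # dual-to-primal conversion
print(np.allclose(K_test1.dot(A).dot(K_test2.T), Xt1.dot(W).dot(Xt2.T)))    # True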
Example #4
import numpy as np
from rlscore.learner.rls import RLS
from rlscore.learner.rls import LOOCV
from rlscore.measure import auc
from rlscore.reader import read_sparse
from rlscore.utilities.grid_search import grid_search

train_labels = np.loadtxt("./examples/data/class_train.labels")
test_labels = np.loadtxt("./examples/data/class_test.labels")
train_features = read_sparse("./examples/data/class_train.features")
test_features = read_sparse("./examples/data/class_test.features")
kwargs = {}
kwargs["train_labels"] = train_labels
kwargs["train_features"] = train_features
kwargs["regparam"] = 1
kwargs["coef0"] = 1
kwargs["degree"] = 3
kwargs["gamma"] = 2
kwargs["kernel"] = "PolynomialKernel"
learner = RLS.createLearner(**kwargs)
learner.train()
kwargs = {}
kwargs["learner"] = learner
kwargs["measure"] = auc
crossvalidator = LOOCV(**kwargs)
grid = [2**i for i in range(-10, 11)]
learner, perfs = grid_search(crossvalidator, grid)
for i in range(len(grid)):
    print "parameter %f cv_performance %f" % (grid[i], perfs[i])
model = learner.getModel()
P = model.predict(test_features)
test_perf = auc(test_labels, P)
print "test set performance: %f" % test_perf
Example #5
import numpy as np
from rlscore.learner.rls import RLS
from rlscore.learner.rls import NfoldCV
from rlscore.measure import auc
from rlscore.reader import read_sparse
from rlscore.reader import read_folds
from rlscore.utilities.grid_search import grid_search

train_labels = np.loadtxt("./examples/data/class_train.labels")
test_labels = np.loadtxt("./examples/data/class_test.labels")
folds = read_folds("./examples/data/folds.txt")
train_features = read_sparse("./examples/data/class_train.features")
test_features = read_sparse("./examples/data/class_test.features")
kwargs = {}
kwargs["train_labels"] = train_labels
kwargs["train_features"] = train_features
kwargs["regparam"] = 1
learner = RLS.createLearner(**kwargs)
learner.train()
kwargs = {}
kwargs["learner"] = learner
kwargs["folds"] = folds
kwargs["measure"] = auc
crossvalidator = NfoldCV(**kwargs)
grid = [2 ** i for i in range(-10, 11)]
learner, perfs = grid_search(crossvalidator, grid)
for i in range(len(grid)):
    print "parameter %f cv_performance %f" % (grid[i], perfs[i])
model = learner.getModel()
P = model.predict(test_features)
test_perf = auc(test_labels, P)
print "test set performance: %f" % test_perf
Example #6
    def test_two_step_rls(self):
        
        regparam1 = 0.001
        regparam2 = 10
        
        K_train1, K_train2, Y_train, K_test1, K_test2, Y_test, X_train1, X_train2, X_test1, X_test2 \
            = self.generate_xortask()
        rows, columns = Y_train.shape
        #print K_train1.shape, K_train2.shape, K_test1.shape, K_test2.shape, rows, columns
        trainlabelcount = rows * columns
        indmatrix = np.mat(range(trainlabelcount)).T.reshape(rows, columns)
        
        
#         #Train linear Kronecker RLS with data-matrices
#         params = {}
#         params["regparam"] = regparam
#         params["xmatrix1"] = X_train1
#         params["xmatrix2"] = X_train2
#         params["train_labels"] = Y_train
#         linear_kron_learner = KronRLS.createLearner(**params)
#         linear_kron_learner.train()
#         linear_kron_model = linear_kron_learner.getModel()
#         linear_kron_testpred = linear_kron_model.predictWithDataMatrices(X_test1, X_test2)
        
        
        #Train linear two-step RLS with data-matrices
        params = {}
        params["regparam1"] = regparam1
        params["regparam2"] = regparam2
        params["xmatrix1"] = X_train1
        params["xmatrix2"] = X_train2
        params["train_labels"] = Y_train
        linear_two_step_learner = TwoStepRLS.createLearner(**params)
        linear_two_step_learner.train()
        linear_two_step_model = linear_two_step_learner.getModel()
        linear_two_step_testpred = linear_two_step_model.predictWithDataMatrices(X_test1, X_test2)
        
        #Train kernel two-step RLS with pre-computed kernel matrices
        params = {}
        params["regparam1"] = regparam1
        params["regparam2"] = regparam2
        params["kmatrix1"] = K_train1
        params["kmatrix2"] = K_train2
        params["train_labels"] = Y_train
        kernel_two_step_learner = TwoStepRLS.createLearner(**params)
        kernel_two_step_learner.train()
        kernel_two_step_model = kernel_two_step_learner.getModel()
        kernel_two_step_testpred = kernel_two_step_model.predictWithKernelMatrices(K_test1, K_test2)
        
        #Train ordinary RLS in two steps for a reference
        params = {}
        params["regparam"] = regparam2
        params["kernel_matrix"] = K_train2
        params["train_labels"] = Y_train.T
        ordinary_rls_first_step = RLS.createLearner(**params)
        ordinary_rls_first_step.train()
        firststeploo = ordinary_rls_first_step.computeLOO().T
        params = {}
        params["regparam"] = regparam1
        params["kernel_matrix"] = K_train1
        params["train_labels"] = firststeploo
        ordinary_rls_second_step = RLS.createLearner(**params)
        ordinary_rls_second_step.train()
        secondsteploo = ordinary_rls_second_step.computeLOO()
        #print 'Basic RLS', secondsteploo[0, 0]
        
        print
        #print type(linear_kron_testpred), type(kernel_kron_testpred), type(ordrls_testpred)
        #print linear_kron_testpred[0, 0], kernel_kron_testpred[0, 0], ordrls_testpred[0, 0]
        #print linear_kron_testpred[0, 1], kernel_kron_testpred[0, 1], ordrls_testpred[0, 1]
        #print linear_kron_testpred[1, 0], kernel_kron_testpred[1, 0], ordrls_testpred[1, 0]
        #print linear_kron_testpred[0, 0], kernel_two_step_testpred[0, 0]
        #print linear_kron_testpred[0, 1], kernel_two_step_testpred[0, 1]
        #print linear_kron_testpred[1, 0], kernel_two_step_testpred[1, 0]
        
        linear_twostepoutofsampleloo = linear_two_step_learner.computeLOO()
        kernel_twostepoutofsampleloo = kernel_two_step_learner.computeLOO()
        
        #Train linear two-step RLS without out-of-sample rows or columns for [0,0]
        params = {}
        params["regparam1"] = regparam1
        params["regparam2"] = regparam2
        params["xmatrix1"] = X_train1[range(1, X_train1.shape[0])]
        params["xmatrix2"] = X_train2[range(1, X_train2.shape[0])]
        params["train_labels"] = Y_train[np.ix_(range(1, Y_train.shape[0]), range(1, Y_train.shape[1]))]
        linear_kron_learner = TwoStepRLS.createLearner(**params)
        linear_kron_learner.train()
        linear_kron_model = linear_kron_learner.getModel()
        linear_kron_testpred_00 = linear_kron_model.predictWithDataMatrices(X_train1[0], X_train2[0])
        
        #Train linear two-step RLS without out-of-sample rows or columns for [2,4]
        params = {}
        params["regparam1"] = regparam1
        params["regparam2"] = regparam2
        params["xmatrix1"] = X_train1[[0, 1] + range(3, K_train1.shape[0])]
        params["xmatrix2"] = X_train2[[0, 1, 2, 3] + range(5, K_train2.shape[0])]
        params["train_labels"] = Y_train[np.ix_([0, 1] + range(3, K_train1.shape[0]), [0, 1, 2, 3] + range(5, K_train2.shape[0]))]
        linear_kron_learner = TwoStepRLS.createLearner(**params)
        linear_kron_learner.train()
        linear_kron_model = linear_kron_learner.getModel()
        linear_kron_testpred_24 = linear_kron_model.predictWithDataMatrices(X_train1[2], X_train2[4])
        
        #Train kernel two-step RLS without out-of-sample rows or columns for [0,0]
        params = {}
        params["regparam1"] = regparam1
        params["regparam2"] = regparam2
        params["kmatrix1"] = K_train1[np.ix_(range(1, K_train1.shape[0]), range(1, K_train1.shape[1]))]
        params["kmatrix2"] = K_train2[np.ix_(range(1, K_train2.shape[0]), range(1, K_train2.shape[1]))]
        params["train_labels"] = Y_train[np.ix_(range(1, Y_train.shape[0]), range(1, Y_train.shape[1]))]
        kernel_kron_learner = TwoStepRLS.createLearner(**params)
        kernel_kron_learner.train()
        kernel_kron_model = kernel_kron_learner.getModel()
        kernel_kron_testpred_00 = kernel_kron_model.predictWithKernelMatrices(K_train1[range(1, K_train1.shape[0]), 0], K_train2[0, range(1, K_train2.shape[0])])
        
        #Train kernel two-step RLS without out-of-sample rows or columns for [2,4]
        params = {}
        params["regparam1"] = regparam1
        params["regparam2"] = regparam2
        params["kmatrix1"] = K_train1[np.ix_([0, 1] + range(3, K_train1.shape[0]), [0, 1] + range(3, K_train1.shape[0]))]
        params["kmatrix2"] = K_train2[np.ix_([0, 1, 2, 3] + range(5, K_train2.shape[0]), [0, 1, 2, 3] + range(5, K_train2.shape[0]))]
        params["train_labels"] = Y_train[np.ix_([0, 1] + range(3, Y_train.shape[0]), [0, 1, 2, 3] + range(5, Y_train.shape[1]))]
        kernel_kron_learner = TwoStepRLS.createLearner(**params)
        kernel_kron_learner.train()
        kernel_kron_model = kernel_kron_learner.getModel()
        #print K_train1[range(1, K_train1.shape[0]), 0].shape, K_train2[0, range(1, K_train2.shape[0])].shape
        kernel_kron_testpred_24 = kernel_kron_model.predictWithKernelMatrices(K_train1[[0, 1] + range(3, K_train1.shape[0]), 2], K_train2[4, [0, 1, 2, 3] + range(5, K_train2.shape[0])])
        
        print Y_train.shape, secondsteploo.shape, kernel_twostepoutofsampleloo.shape
        print secondsteploo[0, 0], linear_kron_testpred_00, kernel_kron_testpred_00, linear_twostepoutofsampleloo[0, 0], kernel_twostepoutofsampleloo[0, 0]
        print secondsteploo[2, 4], linear_kron_testpred_24, kernel_kron_testpred_24, linear_twostepoutofsampleloo[2, 4], kernel_twostepoutofsampleloo[2, 4]
        print
        #print 'Two-step RLS LOO', twostepoutofsampleloo[2, 4]
        #print np.mean(np.abs(linear_kron_testpred - ordrls_testpred)), np.mean(np.abs(kernel_kron_testpred - ordrls_testpred))
        
        
        
        
        
        #Create symmetric data
        K_train1, K_train2, Y_train, K_test1, K_test2, Y_test, X_train1, X_train2, X_test1, X_test2 \
            = self.generate_xortask(
            trainpos1 = 6,
            trainneg1 = 7,
            trainpos2 = 6,
            trainneg2 = 7,
            testpos1 = 26,
            testneg1 = 27,
            testpos2 = 25,
            testneg2 = 25
            )
        K_train1 = K_train2
        K_test1 = K_test2
        Y_train = 0.5 * (Y_train + Y_train.T)
        
        rows, columns = Y_train.shape
        #print K_train1.shape, K_train2.shape, K_test1.shape, K_test2.shape, rows, columns
        trainlabelcount = rows * columns
        indmatrix = np.mat(range(trainlabelcount)).T.reshape(rows, columns)
        
        #Train symmetric kernel two-step RLS with pre-computed kernel matrices
        params = {}
        params["regparam1"] = regparam2
        params["regparam2"] = regparam2
        params["kmatrix1"] = K_train1
        params["kmatrix2"] = K_train2
        params["train_labels"] = Y_train
        kernel_two_step_learner = TwoStepRLS.createLearner(**params)
        kernel_two_step_learner.train()
        kernel_two_step_model = kernel_two_step_learner.getModel()
        kernel_two_step_testpred = kernel_two_step_model.predictWithKernelMatrices(K_test1, K_test2)
        
        #Train two-step RLS without out-of-sample rows or columns
        rowind, colind = 2, 4
        trainrowinds = range(K_train1.shape[0])
        trainrowinds.remove(rowind)
        trainrowinds.remove(colind)
        traincolinds = range(K_train2.shape[0])
        traincolinds.remove(rowind)
        traincolinds.remove(colind)
        
        params = {}
        params["regparam1"] = regparam2
        params["regparam2"] = regparam2
        params["kmatrix1"] = K_train1[np.ix_(trainrowinds, trainrowinds)]
        params["kmatrix2"] = K_train2[np.ix_(traincolinds, traincolinds)]
        params["train_labels"] = Y_train[np.ix_(trainrowinds, traincolinds)]
        kernel_kron_learner = TwoStepRLS.createLearner(**params)
        kernel_kron_learner.train()
        kernel_kron_model = kernel_kron_learner.getModel()
        #kernel_kron_testpred = kernel_kron_model.predictWithKernelMatrices(K_train1[np.ix_([rowind, colind], trainrowinds)], K_train2[np.ix_([rowind, colind], traincolinds)])
        kernel_kron_testpred = kernel_kron_model.predictWithKernelMatrices(K_train1[np.ix_([rowind], trainrowinds)], K_train2[np.ix_([colind], traincolinds)])
        print kernel_kron_testpred
        
        fcsho = kernel_two_step_learner.compute_symmetric_double_LOO()
        print fcsho[2, 4]
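The reference computation above chains two ordinary RLS learners, passing the first learner's LOO predictions to the second. The plain two-step model itself, under the usual two-step kernel ridge formulation (an assumption of this sketch rather than code taken from TwoStepRLS), factors into one regularized solve per domain, which is why regparam1 and regparam2 can be chosen independently.

# Two-step factorization sketch (own toy data; assumes the standard form
# A = (K1 + lam1*I)^-1 Y (K2 + lam2*I)^-1 with predictions K_test1 A K_test2^T).
# The looped per-row / per-column fits and the two matrix solves agree.
import numpy as np

rng = np.random.RandomState(0)
n1, n2, lam1, lam2 = 6, 5, 0.001, 10.0
X1, X2 = rng.randn(n1, 3), rng.randn(n2, 3)
K1, K2 = X1.dot(X1.T), X2.dot(X2.T)
Y = rng.randn(n1, n2)

# Step 1: an ordinary RLS over the second domain for every row of Y
G = np.empty_like(Y)
for i in range(n1):
    G[i, :] = np.linalg.solve(K2 + lam2 * np.eye(n2), Y[i, :])
# Step 2: an ordinary RLS over the first domain for every column of step 1's coefficients
A_loop = np.empty_like(Y)
for j in range(n2):
    A_loop[:, j] = np.linalg.solve(K1 + lam1 * np.eye(n1), G[:, j])

# Same thing as two matrix solves
A_direct = np.linalg.solve(K1 + lam1 * np.eye(n1),
                           np.linalg.solve(K2 + lam2 * np.eye(n2), Y.T).T)
print(np.allclose(A_loop, A_direct))    # True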
Example #7
    def test_cg_kron_rls(self):

        regparam = 0.0001

        K_train1, K_train2, Y_train, K_test1, K_test2, Y_test, X_train1, X_train2, X_test1, X_test2 = self.generate_xortask()
        rows, columns = Y_train.shape
        print K_train1.shape, K_train2.shape, K_test1.shape, K_test2.shape, rows, columns
        rowstimescols = rows * columns
        indmatrix = np.mat(range(rowstimescols)).T.reshape(rows, columns)

        label_row_inds = [
            2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
            2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 0, 0, 1, 1, 2, 2, 2,
            2, 3, 4, 5, 6, 6, 7, 9
        ]
        label_col_inds = [
            2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
            2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 0, 1, 0, 1, 1, 2, 2,
            2, 3, 4, 4, 4, 5, 5, 12
        ]

        Y_train_nonzeros = []
        Y_alt = []
        B = np.mat(np.zeros((len(label_row_inds), rowstimescols)))
        for ind in range(len(label_row_inds)):
            i, j = label_row_inds[ind], label_col_inds[ind]
            Y_train_nonzeros.append(Y_train[i, j])
            Y_alt.append(Y_train[i, j])
            #B[ind, i * columns + j] = 1.
            B[ind, j * rows + i] = 1.
        #print B
        Y_train_nonzeros = np.mat(Y_train_nonzeros).T
        #Y_train_nonzeros = B * Y_train.reshape(rowstimescols, 1)

        #Train linear Kronecker RLS
        params = {}
        params["regparam"] = regparam
        params["xmatrix1"] = X_train1
        params["xmatrix2"] = X_train2
        params["train_labels"] = Y_train_nonzeros
        params["label_row_inds"] = label_row_inds
        params["label_col_inds"] = label_col_inds
        linear_kron_learner = CGKronRLS.createLearner(**params)
        linear_kron_learner.train()
        linear_kron_model = linear_kron_learner.getModel()
        linear_kron_testpred = linear_kron_model.predictWithDataMatricesAlt(
            X_test1, X_test2).reshape(X_test1.shape[0],
                                      X_test2.shape[0],
                                      order='F')

        #Train kernel Kronecker RLS
        params = {}
        params["regparam"] = regparam
        params["kmatrix1"] = K_train1
        params["kmatrix2"] = K_train2
        params["train_labels"] = Y_train_nonzeros
        params["label_row_inds"] = label_row_inds
        params["label_col_inds"] = label_col_inds
        kernel_kron_learner = CGKronRLS.createLearner(**params)
        kernel_kron_learner.train()
        kernel_kron_model = kernel_kron_learner.getModel()
        kernel_kron_testpred = kernel_kron_model.predictWithKernelMatrices(
            K_test1, K_test2)

        #Train an ordinary RLS regressor for reference
        K_Kron_train_x = np.kron(K_train2, K_train1)
        params = {}
        params["kmatrix"] = B * K_Kron_train_x * B.T
        params["train_labels"] = Y_train_nonzeros  #B*(B.T * Y_train_nonzeros).reshape(rows, columns).reshape(rowstimescols, 1) # #Y_train.reshape(rowstimescols, 1)
        ordrls_learner = RLS.createLearner(**params)
        ordrls_learner.solve(regparam)
        ordrls_model = ordrls_learner.getModel()
        K_Kron_test_x = np.kron(K_test2, K_test1) * B.T
        ordrls_testpred = ordrls_model.predict(K_Kron_test_x)
        ordrls_testpred = ordrls_testpred.reshape(Y_test.shape[0],
                                                  Y_test.shape[1],
                                                  order='F')

        print linear_kron_testpred[0, 0], kernel_kron_testpred[0, 0], ordrls_testpred[0, 0]
        print linear_kron_testpred[0, 1], kernel_kron_testpred[0, 1], ordrls_testpred[0, 1]
        print linear_kron_testpred[1, 0], kernel_kron_testpred[1, 0], ordrls_testpred[1, 0]
        print np.mean(np.abs(linear_kron_testpred - ordrls_testpred)), np.mean(np.abs(kernel_kron_testpred - ordrls_testpred))
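Here only the observed (row, column) pairs carry labels, selected through the indicator matrix B. A conjugate-gradient learner can cope with this because multiplying a vector by the restricted Kronecker kernel B (K2 (x) K1) B^T never requires forming the Kronecker product: scatter, sandwich with the two small kernels, gather. The toy check below is my own code, not CGKronRLS internals.

# Sampled Kronecker product sketch (own toy data): the explicit route through
# B and np.kron agrees with the scatter / K1 V K2 / gather route.
import numpy as np

rng = np.random.RandomState(0)
n1, n2 = 4, 3
X1, X2 = rng.randn(n1, 3), rng.randn(n2, 3)
K1, K2 = X1.dot(X1.T), X2.dot(X2.T)
rows = np.array([0, 0, 1, 2, 3])          # observed row indices
cols = np.array([0, 2, 1, 1, 0])          # observed column indices
v = rng.randn(len(rows))                  # a dual vector over the observed pairs

# Explicit route: build the selection matrix B and the full Kronecker product
B = np.zeros((len(rows), n1 * n2))
B[np.arange(len(rows)), cols * n1 + rows] = 1.0     # column-major indexing, as above
explicit = B.dot(np.kron(K2, K1)).dot(B.T).dot(v)

# Implicit route: scatter v into an n1 x n2 matrix, sandwich it, gather it back
V = np.zeros((n1, n2))
np.add.at(V, (rows, cols), v)             # handles repeated index pairs correctly
implicit = K1.dot(V).dot(K2)[rows, cols]
print(np.allclose(explicit, implicit))    # True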
Example #8
 def test_cg_kron_rls(self):
     
     
     regparam = 0.0001
     
     K_train1, K_train2, Y_train, K_test1, K_test2, Y_test, X_train1, X_train2, X_test1, X_test2 = self.generate_xortask()
     rows, columns = Y_train.shape
     print K_train1.shape, K_train2.shape, K_test1.shape, K_test2.shape, rows, columns
     rowstimescols = rows * columns
     indmatrix = np.mat(range(rowstimescols)).T.reshape(rows, columns)
     
     label_row_inds = [2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,0,0,1,1,2,2,2,2,3,4,5,6,6,7,9]
     label_col_inds = [2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,0,1,0,1,1,2,2,2,3,4,4,4,5,5,12]
     
     Y_train_nonzeros = []
     Y_alt = []
     B = np.mat(np.zeros((len(label_row_inds), rowstimescols)))
     for ind in range(len(label_row_inds)):
         i, j = label_row_inds[ind], label_col_inds[ind]
         Y_train_nonzeros.append(Y_train[i, j])
         Y_alt.append(Y_train[i, j])
         #B[ind, i * columns + j] = 1.
         B[ind, j * rows + i] = 1.
     #print B
     Y_train_nonzeros = np.mat(Y_train_nonzeros).T
     #Y_train_nonzeros = B * Y_train.reshape(rowstimescols, 1)
     
     #Train linear Kronecker RLS
     params = {}
     params["regparam"] = regparam
     params["xmatrix1"] = X_train1
     params["xmatrix2"] = X_train2
     params["train_labels"] = Y_train_nonzeros
     params["label_row_inds"] = label_row_inds
     params["label_col_inds"] = label_col_inds
     linear_kron_learner = CGKronRLS.createLearner(**params)
     linear_kron_learner.train()
     linear_kron_model = linear_kron_learner.getModel()
     linear_kron_testpred = linear_kron_model.predictWithDataMatricesAlt(X_test1, X_test2).reshape(X_test1.shape[0], X_test2.shape[0], order = 'F')
     
     #Train kernel Kronecker RLS
     params = {}
     params["regparam"] = regparam
     params["kmatrix1"] = K_train1
     params["kmatrix2"] = K_train2
     params["train_labels"] = Y_train_nonzeros
     params["label_row_inds"] = label_row_inds
     params["label_col_inds"] = label_col_inds
     kernel_kron_learner = CGKronRLS.createLearner(**params)
     kernel_kron_learner.train()
     kernel_kron_model = kernel_kron_learner.getModel()
     kernel_kron_testpred = kernel_kron_model.predictWithKernelMatrices(K_test1, K_test2)
     
     #Train an ordinary RLS regressor for reference
     K_Kron_train_x = np.kron(K_train2, K_train1)
     params = {}
     params["kmatrix"] = B * K_Kron_train_x * B.T
     params["train_labels"] = Y_train_nonzeros#B*(B.T * Y_train_nonzeros).reshape(rows, columns).reshape(rowstimescols, 1) # #Y_train.reshape(rowstimescols, 1)
     ordrls_learner = RLS.createLearner(**params)
     ordrls_learner.solve(regparam)
     ordrls_model = ordrls_learner.getModel()
     K_Kron_test_x = np.kron(K_test2, K_test1) * B.T
     ordrls_testpred = ordrls_model.predict(K_Kron_test_x)
     ordrls_testpred = ordrls_testpred.reshape(Y_test.shape[0], Y_test.shape[1], order = 'F')
     
     print linear_kron_testpred[0, 0], kernel_kron_testpred[0, 0], ordrls_testpred[0, 0]
     print linear_kron_testpred[0, 1], kernel_kron_testpred[0, 1], ordrls_testpred[0, 1]
     print linear_kron_testpred[1, 0], kernel_kron_testpred[1, 0], ordrls_testpred[1, 0]
     print np.mean(np.abs(linear_kron_testpred - ordrls_testpred)), np.mean(np.abs(kernel_kron_testpred - ordrls_testpred))
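This is the same test as Example #7. One detail worth making explicit is the reshape convention: Example #1 pairs np.kron(K1, K2) with default C-order reshapes, while this test pairs np.kron(K2, K1) with order='F'. Both encode the same bilinear map on a rows-by-columns label matrix, as the plain-numpy check below shows.

# Reshape-order check (plain numpy, toy sizes): both Kronecker orderings
# compute K1 * Y * K2^T once the matching reshape order is used.
import numpy as np

rng = np.random.RandomState(0)
K1, K2, Y = rng.randn(3, 3), rng.randn(4, 4), rng.randn(3, 4)   # rows=3, columns=4
ref = K1.dot(Y).dot(K2.T)
c_order = np.kron(K1, K2).dot(Y.reshape(-1)).reshape(3, 4)
f_order = np.kron(K2, K1).dot(Y.reshape(-1, order='F')).reshape(3, 4, order='F')
print(np.allclose(ref, c_order), np.allclose(ref, f_order))     # True True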
Example #9
    def test_cg_kron_rls(self):

        regparam = 0.0001

        K_train1, K_train2, Y_train, K_test1, K_test2, Y_test, X_train1, X_train2, X_test1, X_test2 = self.generate_xortask()
        rows, columns = Y_train.shape
        print K_train1.shape, K_train2.shape, K_test1.shape, K_test2.shape, rows, columns
        rowstimescols = rows * columns
        indmatrix = np.mat(range(rowstimescols)).T.reshape(rows, columns)

        #label_row_inds = [2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,0,0,1,1,2,2,2,2,3,4,5,6,6,7,9]
        #label_col_inds = [2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,0,1,0,1,1,2,2,2,3,4,4,4,5,5,12]
        pointrange = np.arange(rows, columns)
        label_row_inds, label_col_inds = np.unravel_index(
            pointrange, (rows, columns))
        Y_train_nonzeros = []
        Y_alt = []
        B = np.mat(np.zeros((len(label_row_inds), rowstimescols)))
        for ind in range(len(label_row_inds)):
            i, j = label_row_inds[ind], label_col_inds[ind]
            Y_train_nonzeros.append(Y_train[i, j])
            Y_alt.append(Y_train[i, j])
            #B[ind, i * columns + j] = 1.
            B[ind, j * rows + i] = 1.
        #print B
        Y_train_nonzeros = np.mat(Y_train_nonzeros).T
        #Y_train_nonzeros = B * Y_train.reshape(rowstimescols, 1)

        #Train an ordinary RLS regressor for reference
        K_Kron_train_x = np.kron(K_train2, K_train1)
        params = {}
        params["kernel_matrix"] = B * K_Kron_train_x * B.T
        params["train_labels"] = Y_train_nonzeros  #B*(B.T * Y_train_nonzeros).reshape(rows, columns).reshape(rowstimescols, 1) # #Y_train.reshape(rowstimescols, 1)
        ordrls_learner = RLS.createLearner(**params)
        ordrls_learner.solve(regparam)
        ordrls_model = ordrls_learner.getModel()
        K_Kron_test_x = np.kron(K_test2, K_test1) * B.T
        ordrls_testpred = ordrls_model.predict(K_Kron_test_x)
        ordrls_testpred = ordrls_testpred.reshape(Y_test.shape[0],
                                                  Y_test.shape[1],
                                                  order='F')

        #Train linear Kronecker RLS
        class TestCallback():
            def __init__(self):
                self.round = 0

            def callback(self, learner):
                self.round = self.round + 1
                print self.round

            def finished(self, learner):
                print 'finished'

        params = {}
        params["regparam"] = regparam
        params["xmatrix1"] = X_train1
        params["xmatrix2"] = X_train2
        params["train_labels"] = Y_train_nonzeros
        params["label_row_inds"] = label_row_inds
        params["label_col_inds"] = label_col_inds
        tcb = TestCallback()
        params['callback'] = tcb
        linear_kron_learner = CGKronRLS.createLearner(**params)
        linear_kron_learner.train()
        linear_kron_model = linear_kron_learner.getModel()
        linear_kron_testpred = linear_kron_model.predictWithDataMatrices(
            X_test1, X_test2).reshape(X_test1.shape[0],
                                      X_test2.shape[0],
                                      order='F')

        #params["warm_start"] = linear_kron_learner.W
        #linear_kron_learner = CGKronRLS.createLearner(**params)
        #linear_kron_learner.train()
        #linear_kron_model = linear_kron_learner.getModel()
        #linear_kron_testpred = linear_kron_model.predictWithDataMatricesAlt(X_test1, X_test2).reshape(X_test1.shape[0], X_test2.shape[0], order = 'F')

        #Train kernel Kronecker RLS
        params = {}
        params["regparam"] = regparam
        params["kmatrix1"] = K_train1
        params["kmatrix2"] = K_train2
        params["train_labels"] = Y_train_nonzeros
        params["label_row_inds"] = label_row_inds
        params["label_col_inds"] = label_col_inds

        class KernelCallback():
            def __init__(self):
                self.round = 0

            def callback(self, learner):
                self.round = self.round + 1
                tp = KernelPairwiseModel(
                    learner.A, learner.label_row_inds,
                    learner.label_col_inds).predictWithKernelMatrices(
                        K_test1, K_test2)
                print self.round, np.mean(np.abs(tp - ordrls_testpred))

            def finished(self, learner):
                print 'finished'

        tcb = KernelCallback()
        params['callback'] = tcb
        kernel_kron_learner = CGKronRLS.createLearner(**params)
        kernel_kron_learner.train()
        kernel_kron_model = kernel_kron_learner.getModel()
        kernel_kron_testpred = kernel_kron_model.predictWithKernelMatrices(
            K_test1, K_test2)
        kernel_kron_testpred_alt = kernel_kron_model.predictWithKernelMatrices(
            K_test1, K_test2, row_inds=[0, 0, 1], col_inds=[0, 1, 0])

        print linear_kron_testpred[0, 0], kernel_kron_testpred[0, 0], kernel_kron_testpred_alt[0], ordrls_testpred[0, 0]
        print linear_kron_testpred[0, 1], kernel_kron_testpred[0, 1], kernel_kron_testpred_alt[1], ordrls_testpred[0, 1]
        print linear_kron_testpred[1, 0], kernel_kron_testpred[1, 0], kernel_kron_testpred_alt[2], ordrls_testpred[1, 0]
        print np.mean(np.abs(linear_kron_testpred - ordrls_testpred)), np.mean(np.abs(kernel_kron_testpred - ordrls_testpred))
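The two callback classes above only need to expose callback(learner) and finished(learner); judging by how they are used, CGKronRLS invokes the former once per iteration and the latter when training ends, which makes it easy to watch convergence against a reference solution. The illustration below uses a hypothetical ToyIterativeLearner invented for this sketch; it is not part of RLScore and only mimics the calling convention.

# Callback protocol sketch (ToyIterativeLearner is hypothetical, not RLScore):
# any object with callback(learner) and finished(learner) can monitor progress.
import numpy as np

class ProgressCallback(object):
    def __init__(self):
        self.round = 0
    def callback(self, learner):
        self.round += 1
        print("iteration %d" % self.round)
    def finished(self, learner):
        print("finished after %d iterations" % self.round)

class ToyIterativeLearner(object):
    """Hypothetical gradient-descent ridge solver standing in for CGKronRLS."""
    def __init__(self, X, y, regparam, callback, iters=5, step=0.01):
        self.X, self.y, self.regparam = X, y, regparam
        self.callback_obj, self.iters, self.step = callback, iters, step
    def train(self):
        w = np.zeros(self.X.shape[1])
        for _ in range(self.iters):
            grad = self.X.T.dot(self.X.dot(w) - self.y) + self.regparam * w
            w -= self.step * grad
            self.callback_obj.callback(self)    # one call per iteration
        self.W = w
        self.callback_obj.finished(self)        # one call at the end

rng = np.random.RandomState(0)
learner = ToyIterativeLearner(rng.randn(20, 5), rng.randn(20), 0.0001, ProgressCallback())
learner.train()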
Example #10
 def test_cg_kron_rls(self):
     
     
     regparam = 0.0001
     
     K_train1, K_train2, Y_train, K_test1, K_test2, Y_test, X_train1, X_train2, X_test1, X_test2 = self.generate_xortask()
     rows, columns = Y_train.shape
     print K_train1.shape, K_train2.shape, K_test1.shape, K_test2.shape, rows, columns
     rowstimescols = rows * columns
     indmatrix = np.mat(range(rowstimescols)).T.reshape(rows, columns)
     
     #label_row_inds = [2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,0,0,1,1,2,2,2,2,3,4,5,6,6,7,9]
     #label_col_inds = [2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,0,1,0,1,1,2,2,2,3,4,4,4,5,5,12]
     pointrange = np.arange(rows, columns)
     label_row_inds, label_col_inds = np.unravel_index(pointrange, (rows, columns)) 
     Y_train_nonzeros = []
     Y_alt = []
     B = np.mat(np.zeros((len(label_row_inds), rowstimescols)))
     for ind in range(len(label_row_inds)):
         i, j = label_row_inds[ind], label_col_inds[ind]
         Y_train_nonzeros.append(Y_train[i, j])
         Y_alt.append(Y_train[i, j])
         #B[ind, i * columns + j] = 1.
         B[ind, j * rows + i] = 1.
     #print B
     Y_train_nonzeros = np.mat(Y_train_nonzeros).T
     #Y_train_nonzeros = B * Y_train.reshape(rowstimescols, 1)
     
     #Train an ordinary RLS regressor for reference
     K_Kron_train_x = np.kron(K_train2, K_train1)
     params = {}
     params["kernel_matrix"] = B * K_Kron_train_x * B.T
     params["train_labels"] = Y_train_nonzeros#B*(B.T * Y_train_nonzeros).reshape(rows, columns).reshape(rowstimescols, 1) # #Y_train.reshape(rowstimescols, 1)
     ordrls_learner = RLS.createLearner(**params)
     ordrls_learner.solve(regparam)
     ordrls_model = ordrls_learner.getModel()
     K_Kron_test_x = np.kron(K_test2, K_test1) * B.T
     ordrls_testpred = ordrls_model.predict(K_Kron_test_x)
     ordrls_testpred = ordrls_testpred.reshape(Y_test.shape[0], Y_test.shape[1], order = 'F')
     
     #Train linear Kronecker RLS
     class TestCallback():
         def __init__(self):
             self.round = 0
         def callback(self, learner):
             self.round = self.round + 1
             print self.round
         def finished(self, learner):
             print 'finished'
     params = {}
     params["regparam"] = regparam
     params["xmatrix1"] = X_train1
     params["xmatrix2"] = X_train2
     params["train_labels"] = Y_train_nonzeros
     params["label_row_inds"] = label_row_inds
     params["label_col_inds"] = label_col_inds
     tcb = TestCallback()
     params['callback'] = tcb
     linear_kron_learner = CGKronRLS.createLearner(**params)
     linear_kron_learner.train()
     linear_kron_model = linear_kron_learner.getModel()
     linear_kron_testpred = linear_kron_model.predictWithDataMatrices(X_test1, X_test2).reshape(X_test1.shape[0], X_test2.shape[0], order = 'F')
     
     #params["warm_start"] = linear_kron_learner.W
     #linear_kron_learner = CGKronRLS.createLearner(**params)
     #linear_kron_learner.train()
     #linear_kron_model = linear_kron_learner.getModel()
     #linear_kron_testpred = linear_kron_model.predictWithDataMatricesAlt(X_test1, X_test2).reshape(X_test1.shape[0], X_test2.shape[0], order = 'F')
     
     #Train kernel Kronecker RLS
     params = {}
     params["regparam"] = regparam
     params["kmatrix1"] = K_train1
     params["kmatrix2"] = K_train2
     params["train_labels"] = Y_train_nonzeros
     params["label_row_inds"] = label_row_inds
     params["label_col_inds"] = label_col_inds
     class KernelCallback():
         def __init__(self):
             self.round = 0
         def callback(self, learner):
             self.round = self.round + 1
             tp = KernelPairwiseModel(learner.A, learner.label_row_inds, learner.label_col_inds).predictWithKernelMatrices(K_test1, K_test2)
             print self.round, np.mean(np.abs(tp - ordrls_testpred))
         def finished(self, learner):
             print 'finished'
     tcb = KernelCallback()
     params['callback'] = tcb
     kernel_kron_learner = CGKronRLS.createLearner(**params)
     kernel_kron_learner.train()
     kernel_kron_model = kernel_kron_learner.getModel()
     kernel_kron_testpred = kernel_kron_model.predictWithKernelMatrices(K_test1, K_test2)
     
     print linear_kron_testpred[0, 0], kernel_kron_testpred[0, 0], ordrls_testpred[0, 0]
     print linear_kron_testpred[0, 1], kernel_kron_testpred[0, 1], ordrls_testpred[0, 1]
     print linear_kron_testpred[1, 0], kernel_kron_testpred[1, 0], ordrls_testpred[1, 0]
     print np.mean(np.abs(linear_kron_testpred - ordrls_testpred)), np.mean(np.abs(kernel_kron_testpred - ordrls_testpred))