def single_holdout(XD, XT, Y, label_row_inds, label_col_inds, train_sets, val_sets, measure, regparam, rls,
                   maxiter=50, inneriter=100, incindices = None):
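    # Single hold-out evaluation: fit CGKronRLS (or KronSVM) on the first
    # train/validation split and let the callback record measure(Y[valindices], P)
    # after each training iteration; the per-iteration results are returned.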
    cvrounds = len(train_sets)
    all_predictions = np.zeros((maxiter, Y.shape[0]))
    #print 'general nfold.'
    trainindices = train_sets[0]
    valindices = val_sets[0]
    class TestCallback(CF):
        def __init__(self):
            #self.iter = 0
            self.results = []
        def callback(self, learner):
            P = np.mat(learner.getModel().predictWithDataMatricesAlt(XD, XT, label_row_inds[valindices], label_col_inds[valindices])).T
            #print self.iter, measure(Y[valindices], P)
            self.results.append(measure(Y[valindices], P))
            #self.iter += 1
        def get_results(self):
            return self.results
    params = {}
    params["xmatrix1"] = XD
    params["xmatrix2"] = XT
    params["train_labels"] = Y[trainindices]
    params["label_row_inds"] = label_row_inds[trainindices]
    params["label_col_inds"] = label_col_inds[trainindices]
    params["maxiter"] = maxiter
    params["inneriter"] = inneriter
    callback = TestCallback()
    params['callback'] = callback
    if rls:
        learner = CGKronRLS.createLearner(**params)
    else:
        learner = KronSVM.createLearner(**params)
    learner.solve_linear(regparam)
    return callback.get_results()
def general_nfold_cv_no_imputation(XD, XT, Y, label_row_inds, label_col_inds, measure, train_sets, val_sets, regparam, rls, incindices = None):
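    # n-fold cross-validation without imputation: for each fold, train with the given
    # regparam and store the held-out predictions after every iteration; then pick the
    # iteration index whose pooled predictions maximize `measure`.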

    cvrounds = len(train_sets)
    maxiter = 10
    all_predictions = np.zeros((maxiter, Y.shape[0]))
    print 'general nfold.'
    for foldind in range(cvrounds):
        trainindices = train_sets[foldind]
        valindices = val_sets[foldind]
        class TestCallback(CF):
            def __init__(self):
                self.iter = 0
            def callback(self, learner):
                all_predictions[self.iter][valindices] = np.mat(learner.getModel().predictWithDataMatricesAlt(XD, XT, label_row_inds[valindices], label_col_inds[valindices])).T
                self.iter += 1
        params = {}
        params["xmatrix1"] = XD
        params["xmatrix2"] = XT
        params["train_labels"] = Y[trainindices]
        params["label_row_inds"] = label_row_inds[trainindices]
        params["label_col_inds"] = label_col_inds[trainindices]
        params["maxiter"] = maxiter
        params['callback'] = TestCallback()
        if rls:
            learner = CGKronRLS.createLearner(**params)
        else:
            learner = KronSVM.createLearner(**params)
        #regparam = 2. ** (15)
        learner.solve_linear(regparam)
        print foldind, 'done'
    print
    bestperf = -float('Inf')
    bestparam = None
    for iterind in range(maxiter):
        if incindices is None:
            perf = measure(Y, all_predictions[iterind])
        else:
            perf = measure(Y[incindices], all_predictions[iterind][incindices])
        if perf > bestperf:
            bestperf = perf
            bestparam = iterind
        print iterind, perf
    return bestparam, bestperf, all_predictions
def artificial_data_experiment():
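    # Artificial-data run: draw random feature matrices for both domains, sample l of
    # the m1 * m2 pair indices as labeled pairs, and run CGKronRLS for a single iteration.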
    maxiter = 1
    m1 = 1000
    m2 = 10000#0
    mm = m1 * m2
    d = 100
    l = 10000#0
    params = {}
    params["xmatrix1"] = np.random.rand(m1, d)
    params["xmatrix2"] = np.random.rand(m2, d)
    labelinds = pyrandom.sample(range(mm), l)
    rows, cols = np.unravel_index(range(mm), (m1, m2))
    label_row_inds = rows[labelinds]
    label_col_inds = cols[labelinds]
    params["train_labels"] = np.mat(labelinds, dtype=np.float64).T
    params["label_row_inds"] = label_row_inds
    params["label_col_inds"] = label_col_inds
    params["maxiter"] = maxiter
    learner = CGKronRLS.createLearner(**params)
    regparam = 2. ** (20)
    learner.solve_linear(regparam)
Example #4
 def test_cg_kron_rls(self):
     
     regparam = 0.0001
     
     K_train1, K_train2, Y_train, K_test1, K_test2, Y_test, X_train1, X_train2, X_test1, X_test2 = self.generate_xortask()
     #K_train1, K_train2, Y_train, K_test1, K_test2, Y_test, X_train1, X_train2, X_test1, X_test2 = self.generate_xortask(trainpos1 = 1, trainneg1 = 1, trainpos2 = 1, trainneg2 = 1, testpos1 = 1, testneg1 = 1, testpos2 = 1, testneg2 = 1)
     Y_train = Y_train.ravel(order = 'F')
     Y_test = Y_test.ravel(order = 'F')
     train_rows, train_columns = K_train1.shape[0], K_train2.shape[0]
     test_rows, test_columns = K_test1.shape[0], K_test2.shape[0]
     rowstimescols = train_rows * train_columns
     allindices = np.arange(rowstimescols)
     all_label_row_inds, all_label_col_inds = np.unravel_index(allindices, (train_rows, train_columns), order = 'F')
     #incinds = np.random.permutation(allindices)
     #incinds = np.random.choice(allindices, 50, replace = False)
     incinds = np.random.choice(allindices, 10, replace = False)
     label_row_inds, label_col_inds = all_label_row_inds[incinds], all_label_col_inds[incinds]
     print(train_rows, train_columns)
     print(np.unique(label_row_inds))
     print(np.unique(label_col_inds))
     #foo
     Y_train_known_outputs = Y_train.reshape(rowstimescols, order = 'F')[incinds]
     
     alltestindices = np.arange(test_rows * test_columns)
     all_test_label_row_inds, all_test_label_col_inds = np.unravel_index(alltestindices, (test_rows, test_columns), order = 'F')
     
     #Train an ordinary RLS regressor for reference
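     #With column-major ('F') vectorization of the label matrix, np.kron(K_train2, K_train1)
     #is the full pairwise kernel matrix, so restricting it to incinds gives the Gram matrix
     #of the known training pairs.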
     params = {}
     params["X"] = np.kron(K_train2, K_train1)[np.ix_(incinds, incinds)]
     params["kernel"] = "PrecomputedKernel"
     params["Y"] = Y_train_known_outputs
     params["regparam"] = regparam
     ordrls_learner = RLS(**params)
     ordrls_model = ordrls_learner.predictor
     K_Kron_test = np.kron(K_test2, K_test1)[:, incinds]
     ordrls_testpred = ordrls_model.predict(K_Kron_test)
     ordrls_testpred = ordrls_testpred.reshape((test_rows, test_columns), order = 'F')
     
     #Train linear Kronecker RLS
     class TestCallback():
         def __init__(self):
             self.round = 0
         def callback(self, learner):
             self.round = self.round + 1
             tp = LinearPairwisePredictor(learner.W).predict(X_test1, X_test2)
             print(str(self.round) + ' ' + str(np.mean(np.abs(tp - ordrls_testpred.ravel(order = 'F')))))
         def finished(self, learner):
             print('Training of linear Kronecker RLS finished')
     params = {}
     params["regparam"] = regparam
     params["X1"] = X_train1
     params["X2"] = X_train2
     params["Y"] = Y_train_known_outputs
     params["label_row_inds"] = label_row_inds
     params["label_col_inds"] = label_col_inds
     tcb = TestCallback()
     params['callback'] = tcb
     linear_kron_learner = CGKronRLS(**params)
     linear_kron_testpred = linear_kron_learner.predict(X_test1, X_test2).reshape((test_rows, test_columns), order = 'F')
     linear_kron_testpred_alt = linear_kron_learner.predict(X_test1, X_test2, [0, 0, 1], [0, 1, 0])
     
     #Train kernel Kronecker RLS
     params = {}
     params["regparam"] = regparam
     params["K1"] = K_train1
     params["K2"] = K_train2
     params["Y"] = Y_train_known_outputs
     params["label_row_inds"] = label_row_inds
     params["label_col_inds"] = label_col_inds
     class KernelCallback():
         def __init__(self):
             self.round = 0
         def callback(self, learner):
             self.round = self.round + 1
             tp = KernelPairwisePredictor(learner.A, learner.input1_inds, learner.input2_inds).predict(K_test1, K_test2)
             print(str(self.round) + ' ' + str(np.mean(np.abs(tp - ordrls_testpred.ravel(order = 'F')))))
         def finished(self, learner):
             print('Training of kernel Kronecker RLS finished')
     tcb = KernelCallback()
     params['callback'] = tcb
     kernel_kron_learner = CGKronRLS(**params)
     kernel_kron_testpred = kernel_kron_learner.predict(K_test1, K_test2).reshape((test_rows, test_columns), order = 'F')
     kernel_kron_testpred_alt = kernel_kron_learner.predict(K_test1, K_test2, [0, 0, 1], [0, 1, 0])
     
     print('Predictions: Linear CgKronRLS, Kernel CgKronRLS, ordinary RLS')
     print('[0, 0]: ' + str(linear_kron_testpred[0, 0]) + ' ' + str(kernel_kron_testpred[0, 0]) + ' ' + str(ordrls_testpred[0, 0]), linear_kron_testpred_alt[0], kernel_kron_testpred_alt[0])
     print('[0, 1]: ' + str(linear_kron_testpred[0, 1]) + ' ' + str(kernel_kron_testpred[0, 1]) + ' ' + str(ordrls_testpred[0, 1]), linear_kron_testpred_alt[1], kernel_kron_testpred_alt[1])
     print('[1, 0]: ' + str(linear_kron_testpred[1, 0]) + ' ' + str(kernel_kron_testpred[1, 0]) + ' ' + str(ordrls_testpred[1, 0]), linear_kron_testpred_alt[2], kernel_kron_testpred_alt[2])
     print('Meanabsdiff: linear KronRLS - ordinary RLS, kernel KronRLS - ordinary RLS')
     print(str(np.mean(np.abs(linear_kron_testpred - ordrls_testpred))) + ' ' + str(np.mean(np.abs(kernel_kron_testpred - ordrls_testpred))))
     np.testing.assert_almost_equal(linear_kron_testpred, ordrls_testpred, decimal=5)
     np.testing.assert_almost_equal(kernel_kron_testpred, ordrls_testpred, decimal=4)
     
     #Train multiple kernel Kronecker RLS
     params = {}
     params["regparam"] = regparam
     params["K1"] = [K_train1, K_train1]
     params["K2"] = [K_train2, K_train2]
     params["weights"] = [1. / 3, 2. / 3]
     params["Y"] = Y_train_known_outputs
     params["label_row_inds"] = [label_row_inds, label_row_inds]
     params["label_col_inds"] = [label_col_inds, label_col_inds]
     class KernelCallback():
         def __init__(self):
             self.round = 0
         def callback(self, learner):
             self.round = self.round + 1
             tp = KernelPairwisePredictor(learner.A, learner.input1_inds, learner.input2_inds, params["weights"]).predict([K_test1, K_test1], [K_test2, K_test2])
             print(str(self.round) + ' ' + str(np.mean(np.abs(tp - ordrls_testpred.ravel(order = 'F')))))
         def finished(self, learner):
             print('finished')
     tcb = KernelCallback()
     params['callback'] = tcb
     mkl_kernel_kron_learner = CGKronRLS(**params)
     mkl_kernel_kron_testpred = mkl_kernel_kron_learner.predict([K_test1, K_test1], [K_test2, K_test2]).reshape((test_rows, test_columns), order = 'F')
     #kernel_kron_testpred_alt = kernel_kron_learner.predict(K_test1, K_test2, [0, 0, 1], [0, 1, 0])
     
     
     #Train linear multiple kernel Kronecker RLS
     params = {}
     params["regparam"] = regparam
     params["X1"] = [X_train1, X_train1]
     params["X2"] = [X_train2, X_train2]
     params["weights"] = [1. / 3, 2. / 3]
     params["Y"] = Y_train_known_outputs
     params["label_row_inds"] = [label_row_inds, label_row_inds]
     params["label_col_inds"] = [label_col_inds, label_col_inds]
     mkl_linear_kron_learner = CGKronRLS(**params)
     mkl_linear_kron_testpred = mkl_linear_kron_learner.predict([X_test1, X_test1], [X_test2, X_test2]).reshape((test_rows, test_columns), order = 'F')
     #kernel_kron_testpred_alt = kernel_kron_learner.predict(K_test1, K_test2, [0, 0, 1], [0, 1, 0])
     
     print('Predictions: Linear CgKronRLS, MKL Kernel CgKronRLS, ordinary RLS, MKL linear CgKronRLS')
     print('[0, 0]: ' + str(linear_kron_testpred[0, 0]) + ' ' + str(mkl_kernel_kron_testpred[0, 0]) + ' ' + str(ordrls_testpred[0, 0]) + ' ' + str(mkl_linear_kron_testpred[0, 0]))#, linear_kron_testpred_alt[0], kernel_kron_testpred_alt[0]
     print('[0, 1]: ' + str(linear_kron_testpred[0, 1]) + ' ' + str(mkl_kernel_kron_testpred[0, 1]) + ' ' + str(ordrls_testpred[0, 1]) + ' ' + str(mkl_linear_kron_testpred[0, 1]))#, linear_kron_testpred_alt[1], kernel_kron_testpred_alt[1]
     print('[1, 0]: ' + str(linear_kron_testpred[1, 0]) + ' ' + str(mkl_kernel_kron_testpred[1, 0]) + ' ' + str(ordrls_testpred[1, 0]) + ' ' + str(mkl_linear_kron_testpred[1, 0]))#, linear_kron_testpred_alt[2], kernel_kron_testpred_alt[2]
     print('Meanabsdiff: MKL kernel KronRLS - ordinary RLS')
     print(str(np.mean(np.abs(mkl_kernel_kron_testpred - ordrls_testpred))))
     np.testing.assert_almost_equal(mkl_kernel_kron_testpred, ordrls_testpred, decimal=3)
     print('Meanabsdiff: MKL linear KronRLS - ordinary RLS')
     print(str(np.mean(np.abs(mkl_linear_kron_testpred - ordrls_testpred))))
     np.testing.assert_almost_equal(mkl_linear_kron_testpred, ordrls_testpred, decimal=3)
     #'''
     
     
     
     
     
     #Train polynomial kernel Kronecker RLS
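     #The three kernel-pair terms with weights [1., 2., 1.] combine to
     #K1(d,d')**2 + 2*K1(d,d')*K2(t,t') + K2(t,t')**2 = (K1(d,d') + K2(t,t'))**2,
     #i.e. a degree-two polynomial pairwise kernel over the labeled pairs.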
     params = {}
     params["regparam"] = regparam
     #params["K1"] = [K_train1, K_train1, K_train2]
     #params["K2"] = [K_train1, K_train2, K_train2]
     #params["weights"] = [1., 2., 1.]
     params["pko"] = pairwise_kernel_operator.PairwiseKernelOperator(
                             [K_train1, K_train1, K_train2],
                             [K_train1, K_train2, K_train2],
                             [label_row_inds, label_row_inds, label_col_inds],
                             [label_row_inds, label_col_inds, label_col_inds],
                             [label_row_inds, label_row_inds, label_col_inds],
                             [label_row_inds, label_col_inds, label_col_inds],
                             [1., 2., 1.])
     params["Y"] = Y_train_known_outputs
     #params["label_row_inds"] = [label_row_inds, label_row_inds, label_col_inds]
     #params["label_col_inds"] = [label_row_inds, label_col_inds, label_col_inds]
     class KernelCallback():
         def __init__(self):
             self.round = 0
         def callback(self, learner):
             self.round = self.round + 1
             #tp = KernelPairwisePredictor(learner.A, learner.input1_inds, learner.input2_inds, params["weights"]).predict([K_test1, K_test1], [K_test2, K_test2])
             #print(str(self.round) + ' ' + str(np.mean(np.abs(tp - ordrls_testpred.ravel(order = 'F')))))
         def finished(self, learner):
             print('finished')
     tcb = KernelCallback()
     params['callback'] = tcb
     poly_kernel_kron_learner = CGKronRLS(**params)
     pko = pairwise_kernel_operator.PairwiseKernelOperator(
                             [K_test1, K_test1, K_test2],
                             [K_test1, K_test2, K_test2],
                             [all_test_label_row_inds, all_test_label_row_inds, all_test_label_col_inds],
                             [all_test_label_row_inds, all_test_label_col_inds, all_test_label_col_inds],
                             [label_row_inds, label_row_inds, label_col_inds],
                             [label_row_inds, label_col_inds, label_col_inds],
                             [1., 2., 1.])
     #poly_kernel_kron_testpred = poly_kernel_kron_learner.predict(pko = pko)
     poly_kernel_kron_testpred = poly_kernel_kron_learner.predict([K_test1, K_test1, K_test2], [K_test1, K_test2, K_test2], [all_test_label_row_inds, all_test_label_row_inds, all_test_label_col_inds], [all_test_label_row_inds, all_test_label_col_inds, all_test_label_col_inds])
     #print(poly_kernel_kron_testpred, 'Polynomial kernel via CGKronRLS')
     
     #Train an ordinary RLS regressor with polynomial kernel for reference
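     #Reference model: each pair is represented by concatenating the feature vectors of its
     #two objects (the np.kron / ones construction below); RLS with a polynomial kernel on
     #this joint representation should match the pairwise polynomial kernel model above,
     #as checked by the mean absolute difference printed at the end.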
     params = {}
     params["X"] = np.hstack([np.kron(np.ones((X_train2.shape[0], 1)), X_train1), np.kron(X_train2, np.ones((X_train1.shape[0], 1)))])[incinds]
     #params["X"] = np.hstack([np.kron(X_train1, np.ones((X_train2.shape[0], 1))), np.kron(np.ones((X_train1.shape[0], 1)), X_train2)])[incinds]
     params["kernel"] = "PolynomialKernel"
     params["Y"] = Y_train_known_outputs
     params["regparam"] = regparam
     ordrls_poly_kernel_learner = RLS(**params)
     X_dir_test = np.hstack([np.kron(np.ones((X_test2.shape[0], 1)), X_test1), np.kron(X_test2, np.ones((X_test1.shape[0], 1)))])
     #X_dir_test = np.hstack([np.kron(X_test1, np.ones((X_test2.shape[0], 1))), np.kron(np.ones((X_test1.shape[0], 1)), X_test2)])
     ordrls_poly_kernel_testpred = ordrls_poly_kernel_learner.predict(X_dir_test)
     #print(ordrls_poly_kernel_testpred, 'Ord. poly RLS')
     print('Meanabsdiff: Polynomial kernel KronRLS - Ordinary polynomial kernel RLS')
     print(str(np.mean(np.abs(poly_kernel_kron_testpred - ordrls_poly_kernel_testpred))))
     
     '''
     #Train polynomial kernel Kronecker RLS
     params = {}
     params["regparam"] = regparam
     #params["X1"] = [X_train1, X_train1, X_train2]
     #params["X2"] = [X_train1, X_train2, X_train2]
     params["K1"] = [K_train1, K_train1, K_train2]
     params["K2"] = [K_train1, K_train2, K_train2]
     params["weights"] = [1., 2., 1.]
     params["Y"] = Y_train_known_outputs
     params["label_row_inds"] = [label_row_inds, label_row_inds, label_col_inds]
     params["label_col_inds"] = [label_row_inds, label_col_inds, label_col_inds]
     class KernelCallback():
         def __init__(self):
             self.round = 0
         def callback(self, learner):
             self.round = self.round + 1
             #tp = KernelPairwisePredictor(learner.A, learner.input1_inds, learner.input2_inds, params["weights"]).predict([K_test1, K_test1], [K_test2, K_test2])
             #print(str(self.round) + ' ' + str(np.mean(np.abs(tp - ordrls_testpred.ravel(order = 'F')))))
         def finished(self, learner):
             print('finished')
     tcb = KernelCallback()
     params['callback'] = tcb
     poly_kernel_linear_kron_learner = CGKronRLS(**params)
     #poly_kernel_linear_kron_testpred = poly_kernel_linear_kron_learner.predict([X_test1, X_test1, X_test2], [X_test1, X_test2, X_test2], [all_test_label_row_inds, all_test_label_row_inds, all_test_label_col_inds], [all_test_label_row_inds, all_test_label_col_inds, all_test_label_col_inds])
     poly_kernel_linear_kron_testpred = poly_kernel_linear_kron_learner.predict([K_test1, K_test1, K_test2], [K_test1, K_test2, K_test2], [all_test_label_row_inds, all_test_label_row_inds, all_test_label_col_inds], [all_test_label_row_inds, all_test_label_col_inds, all_test_label_col_inds])
     #print(poly_kernel_kron_testpred, 'Polynomial kernel via CGKronRLS (linear)')
     print('Meanabsdiff: Polynomial kernel KronRLS (linear) - Ordinary polynomial kernel RLS')
     print(str(np.mean(np.abs(poly_kernel_linear_kron_testpred - ordrls_poly_kernel_testpred))))
     '''
     
     
     
     
     
     
     
Example #5
    def test_cg_kron_rls(self):

        regparam = 0.0001

        K_train1, K_train2, Y_train, K_test1, K_test2, Y_test, X_train1, X_train2, X_test1, X_test2 = self.generate_xortask(
        )
        rows, columns = Y_train.shape
        print K_train1.shape, K_train2.shape, K_test1.shape, K_test2.shape, rows, columns
        rowstimescols = rows * columns
        indmatrix = np.mat(range(rowstimescols)).T.reshape(rows, columns)

        label_row_inds = [
            2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
            2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 0, 0, 1, 1, 2, 2, 2,
            2, 3, 4, 5, 6, 6, 7, 9
        ]
        label_col_inds = [
            2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
            2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 0, 1, 0, 1, 1, 2, 2,
            2, 3, 4, 4, 4, 5, 5, 12
        ]

        Y_train_nonzeros = []
        Y_alt = []
        B = np.mat(np.zeros((len(label_row_inds), rowstimescols)))
        for ind in range(len(label_row_inds)):
            i, j = label_row_inds[ind], label_col_inds[ind]
            Y_train_nonzeros.append(Y_train[i, j])
            Y_alt.append(Y_train[i, j])
            #B[ind, i * columns + j] = 1.
            B[ind, j * rows + i] = 1.
        #print B
        Y_train_nonzeros = np.mat(Y_train_nonzeros).T
        #Y_train_nonzeros = B * Y_train.reshape(rowstimescols, 1)

        #Train linear Kronecker RLS
        params = {}
        params["regparam"] = regparam
        params["xmatrix1"] = X_train1
        params["xmatrix2"] = X_train2
        params["train_labels"] = Y_train_nonzeros
        params["label_row_inds"] = label_row_inds
        params["label_col_inds"] = label_col_inds
        linear_kron_learner = CGKronRLS.createLearner(**params)
        linear_kron_learner.train()
        linear_kron_model = linear_kron_learner.getModel()
        linear_kron_testpred = linear_kron_model.predictWithDataMatricesAlt(
            X_test1, X_test2).reshape(X_test1.shape[0],
                                      X_test2.shape[0],
                                      order='F')

        #Train kernel Kronecker RLS
        params = {}
        params["regparam"] = regparam
        params["kmatrix1"] = K_train1
        params["kmatrix2"] = K_train2
        params["train_labels"] = Y_train_nonzeros
        params["label_row_inds"] = label_row_inds
        params["label_col_inds"] = label_col_inds
        kernel_kron_learner = CGKronRLS.createLearner(**params)
        kernel_kron_learner.train()
        kernel_kron_model = kernel_kron_learner.getModel()
        kernel_kron_testpred = kernel_kron_model.predictWithKernelMatrices(
            K_test1, K_test2)

        #Train an ordinary RLS regressor for reference
        K_Kron_train_x = np.kron(K_train2, K_train1)
        params = {}
        params["kmatrix"] = B * K_Kron_train_x * B.T
        params["train_labels"] = Y_train_nonzeros  #B*(B.T * Y_train_nonzeros).reshape(rows, columns).reshape(rowstimescols, 1) # #Y_train.reshape(rowstimescols, 1)
        ordrls_learner = RLS.createLearner(**params)
        ordrls_learner.solve(regparam)
        ordrls_model = ordrls_learner.getModel()
        K_Kron_test_x = np.kron(K_test2, K_test1) * B.T
        ordrls_testpred = ordrls_model.predict(K_Kron_test_x)
        ordrls_testpred = ordrls_testpred.reshape(Y_test.shape[0],
                                                  Y_test.shape[1],
                                                  order='F')

        print linear_kron_testpred[0, 0], kernel_kron_testpred[
            0, 0], ordrls_testpred[0, 0]
        print linear_kron_testpred[0, 1], kernel_kron_testpred[
            0, 1], ordrls_testpred[0, 1]
        print linear_kron_testpred[1, 0], kernel_kron_testpred[
            1, 0], ordrls_testpred[1, 0]
        print np.mean(np.abs(linear_kron_testpred - ordrls_testpred)), np.mean(
            np.abs(kernel_kron_testpred - ordrls_testpred))
Example #6
    def test_cg_kron_rls(self):

        regparam = 0.0001

        K_train1, K_train2, Y_train, K_test1, K_test2, Y_test, X_train1, X_train2, X_test1, X_test2 = self.generate_xortask(
        )
        #K_train1, K_train2, Y_train, K_test1, K_test2, Y_test, X_train1, X_train2, X_test1, X_test2 = self.generate_xortask(trainpos1 = 1, trainneg1 = 1, trainpos2 = 1, trainneg2 = 1, testpos1 = 1, testneg1 = 1, testpos2 = 1, testneg2 = 1)
        Y_train = Y_train.ravel(order='F')
        Y_test = Y_test.ravel(order='F')
        train_rows, train_columns = K_train1.shape[0], K_train2.shape[0]
        test_rows, test_columns = K_test1.shape[0], K_test2.shape[0]
        rowstimescols = train_rows * train_columns
        allindices = np.arange(rowstimescols)
        all_label_row_inds, all_label_col_inds = np.unravel_index(
            allindices, (train_rows, train_columns), order='F')
        #incinds = np.random.permutation(allindices)
        #incinds = np.random.choice(allindices, 50, replace = False)
        incinds = np.random.choice(allindices, 40, replace=False)
        label_row_inds, label_col_inds = all_label_row_inds[
            incinds], all_label_col_inds[incinds]
        Y_train_known_outputs = Y_train.reshape(rowstimescols,
                                                order='F')[incinds]

        alltestindices = np.arange(test_rows * test_columns)
        all_test_label_row_inds, all_test_label_col_inds = np.unravel_index(
            alltestindices, (test_rows, test_columns), order='F')

        #Train an ordinary RLS regressor for reference
        params = {}
        params["X"] = np.kron(K_train2, K_train1)[np.ix_(incinds, incinds)]
        params["kernel"] = "PrecomputedKernel"
        params["Y"] = Y_train_known_outputs
        params["regparam"] = regparam
        ordrls_learner = RLS(**params)
        ordrls_model = ordrls_learner.predictor
        K_Kron_test = np.kron(K_test2, K_test1)[:, incinds]
        ordrls_testpred = ordrls_model.predict(K_Kron_test)
        ordrls_testpred = ordrls_testpred.reshape((test_rows, test_columns),
                                                  order='F')

        #Train linear Kronecker RLS
        class TestCallback():
            def __init__(self):
                self.round = 0

            def callback(self, learner):
                self.round = self.round + 1
                tp = LinearPairwisePredictor(learner.W).predict(
                    X_test1, X_test2)
                print(
                    str(self.round) + ' ' +
                    str(np.mean(np.abs(tp -
                                       ordrls_testpred.ravel(order='F')))))

            def finished(self, learner):
                print('finished')

        params = {}
        params["regparam"] = regparam
        params["X1"] = X_train1
        params["X2"] = X_train2
        params["Y"] = Y_train_known_outputs
        params["label_row_inds"] = label_row_inds
        params["label_col_inds"] = label_col_inds
        tcb = TestCallback()
        params['callback'] = tcb
        linear_kron_learner = CGKronRLS(**params)
        linear_kron_testpred = linear_kron_learner.predict(
            X_test1, X_test2).reshape((test_rows, test_columns), order='F')
        linear_kron_testpred_alt = linear_kron_learner.predict(
            X_test1, X_test2, [0, 0, 1], [0, 1, 0])

        #Train kernel Kronecker RLS
        params = {}
        params["regparam"] = regparam
        params["K1"] = K_train1
        params["K2"] = K_train2
        params["Y"] = Y_train_known_outputs
        params["label_row_inds"] = label_row_inds
        params["label_col_inds"] = label_col_inds

        class KernelCallback():
            def __init__(self):
                self.round = 0

            def callback(self, learner):
                self.round = self.round + 1
                tp = KernelPairwisePredictor(learner.A, learner.input1_inds,
                                             learner.input2_inds).predict(
                                                 K_test1, K_test2)
                print(
                    str(self.round) + ' ' +
                    str(np.mean(np.abs(tp -
                                       ordrls_testpred.ravel(order='F')))))

            def finished(self, learner):
                print('finished')

        tcb = KernelCallback()
        params['callback'] = tcb
        kernel_kron_learner = CGKronRLS(**params)
        kernel_kron_testpred = kernel_kron_learner.predict(
            K_test1, K_test2).reshape((test_rows, test_columns), order='F')
        kernel_kron_testpred_alt = kernel_kron_learner.predict(
            K_test1, K_test2, [0, 0, 1], [0, 1, 0])

        print('Predictions: Linear CgKronRLS, Kernel CgKronRLS, ordinary RLS')
        print('[0, 0]: ' + str(linear_kron_testpred[0, 0]) + ' ' +
              str(kernel_kron_testpred[0, 0]) + ' ' +
              str(ordrls_testpred[0, 0])
              )  #, linear_kron_testpred_alt[0], kernel_kron_testpred_alt[0]
        print('[0, 1]: ' + str(linear_kron_testpred[0, 1]) + ' ' +
              str(kernel_kron_testpred[0, 1]) + ' ' +
              str(ordrls_testpred[0, 1])
              )  #, linear_kron_testpred_alt[1], kernel_kron_testpred_alt[1]
        print('[1, 0]: ' + str(linear_kron_testpred[1, 0]) + ' ' +
              str(kernel_kron_testpred[1, 0]) + ' ' +
              str(ordrls_testpred[1, 0])
              )  #, linear_kron_testpred_alt[2], kernel_kron_testpred_alt[2]
        print(
            'Meanabsdiff: linear KronRLS - ordinary RLS, kernel KronRLS - ordinary RLS'
        )
        print(
            str(np.mean(np.abs(linear_kron_testpred - ordrls_testpred))) +
            ' ' + str(np.mean(np.abs(kernel_kron_testpred - ordrls_testpred))))
        np.testing.assert_almost_equal(linear_kron_testpred,
                                       ordrls_testpred,
                                       decimal=5)
        np.testing.assert_almost_equal(kernel_kron_testpred,
                                       ordrls_testpred,
                                       decimal=4)

        #Train multiple kernel Kronecker RLS
        params = {}
        params["regparam"] = regparam
        params["K1"] = [K_train1, K_train1]
        params["K2"] = [K_train2, K_train2]
        params["weights"] = [1. / 3, 2. / 3]
        params["Y"] = Y_train_known_outputs
        params["label_row_inds"] = [label_row_inds, label_row_inds]
        params["label_col_inds"] = [label_col_inds, label_col_inds]

        class KernelCallback():
            def __init__(self):
                self.round = 0

            def callback(self, learner):
                self.round = self.round + 1
                tp = KernelPairwisePredictor(
                    learner.A, learner.input1_inds, learner.input2_inds,
                    params["weights"]).predict([K_test1, K_test1],
                                               [K_test2, K_test2])
                print(
                    str(self.round) + ' ' +
                    str(np.mean(np.abs(tp -
                                       ordrls_testpred.ravel(order='F')))))

            def finished(self, learner):
                print('finished')

        tcb = KernelCallback()
        params['callback'] = tcb
        mkl_kernel_kron_learner = CGKronRLS(**params)
        mkl_kernel_kron_testpred = mkl_kernel_kron_learner.predict(
            [K_test1, K_test1], [K_test2, K_test2]).reshape(
                (test_rows, test_columns), order='F')
        #kernel_kron_testpred_alt = kernel_kron_learner.predict(K_test1, K_test2, [0, 0, 1], [0, 1, 0])

        print(
            'Predictions: Linear CgKronRLS, MKL Kernel CgKronRLS, ordinary RLS'
        )
        print('[0, 0]: ' + str(linear_kron_testpred[0, 0]) + ' ' +
              str(mkl_kernel_kron_testpred[0, 0]) + ' ' +
              str(ordrls_testpred[0, 0])
              )  #, linear_kron_testpred_alt[0], kernel_kron_testpred_alt[0]
        print('[0, 1]: ' + str(linear_kron_testpred[0, 1]) + ' ' +
              str(mkl_kernel_kron_testpred[0, 1]) + ' ' +
              str(ordrls_testpred[0, 1])
              )  #, linear_kron_testpred_alt[1], kernel_kron_testpred_alt[1]
        print('[1, 0]: ' + str(linear_kron_testpred[1, 0]) + ' ' +
              str(mkl_kernel_kron_testpred[1, 0]) + ' ' +
              str(ordrls_testpred[1, 0])
              )  #, linear_kron_testpred_alt[2], kernel_kron_testpred_alt[2]
        print('Meanabsdiff: MKL kernel KronRLS - ordinary RLS')
        print(str(np.mean(np.abs(mkl_kernel_kron_testpred - ordrls_testpred))))
        np.testing.assert_almost_equal(mkl_kernel_kron_testpred,
                                       ordrls_testpred,
                                       decimal=4)

        #Train polynomial kernel Kronecker RLS
        params = {}
        params["regparam"] = regparam
        params["K1"] = [K_train1, K_train1, K_train2]
        params["K2"] = [K_train1, K_train2, K_train2]
        params["weights"] = [1., 2., 1.]
        params["Y"] = Y_train_known_outputs
        params["label_row_inds"] = [
            label_row_inds, label_row_inds, label_col_inds
        ]
        params["label_col_inds"] = [
            label_row_inds, label_col_inds, label_col_inds
        ]

        class KernelCallback():
            def __init__(self):
                self.round = 0

            def callback(self, learner):
                self.round = self.round + 1
                #tp = KernelPairwisePredictor(learner.A, learner.input1_inds, learner.input2_inds, params["weights"]).predict([K_test1, K_test1], [K_test2, K_test2])
                #print(str(self.round) + ' ' + str(np.mean(np.abs(tp - ordrls_testpred.ravel(order = 'F')))))
            def finished(self, learner):
                print('finished')

        tcb = KernelCallback()
        params['callback'] = tcb
        poly_kernel_kron_learner = CGKronRLS(**params)
        poly_kernel_kron_testpred = poly_kernel_kron_learner.predict(
            [K_test1, K_test1, K_test2], [K_test1, K_test2, K_test2], [
                all_test_label_row_inds, all_test_label_row_inds,
                all_test_label_col_inds
            ], [
                all_test_label_row_inds, all_test_label_col_inds,
                all_test_label_col_inds
            ])
        #print(poly_kernel_kron_testpred, 'Polynomial kernel via CGKronRLS')

        #Train an ordinary RLS regressor with polynomial kernel for reference
        params = {}
        params["X"] = np.hstack([
            np.kron(np.ones((X_train2.shape[0], 1)), X_train1),
            np.kron(X_train2, np.ones((X_train1.shape[0], 1)))
        ])[incinds]
        #params["X"] = np.hstack([np.kron(X_train1, np.ones((X_train2.shape[0], 1))), np.kron(np.ones((X_train1.shape[0], 1)), X_train2)])[incinds]
        params["kernel"] = "PolynomialKernel"
        params["Y"] = Y_train_known_outputs
        params["regparam"] = regparam
        ordrls_poly_kernel_learner = RLS(**params)
        X_dir_test = np.hstack([
            np.kron(np.ones((X_test2.shape[0], 1)), X_test1),
            np.kron(X_test2, np.ones((X_test1.shape[0], 1)))
        ])
        #X_dir_test = np.hstack([np.kron(X_test1, np.ones((X_test2.shape[0], 1))), np.kron(np.ones((X_test1.shape[0], 1)), X_test2)])
        ordrls_poly_kernel_testpred = ordrls_poly_kernel_learner.predict(
            X_dir_test)
        #print(ordrls_poly_kernel_testpred, 'Ord. poly RLS')
        print(
            'Meanabsdiff: Polynomial kernel KronRLS - Ordinary polynomial kernel RLS'
        )
        print(
            str(
                np.mean(
                    np.abs(poly_kernel_kron_testpred -
                           ordrls_poly_kernel_testpred))))
Example #7
 def test_cg_kron_rls(self):
     
     
     regparam = 0.0001
     
     K_train1, K_train2, Y_train, K_test1, K_test2, Y_test, X_train1, X_train2, X_test1, X_test2 = self.generate_xortask()
     rows, columns = Y_train.shape
     print K_train1.shape, K_train2.shape, K_test1.shape, K_test2.shape, rows, columns
     rowstimescols = rows * columns
     indmatrix = np.mat(range(rowstimescols)).T.reshape(rows, columns)
     
     label_row_inds = [2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,0,0,1,1,2,2,2,2,3,4,5,6,6,7,9]
     label_col_inds = [2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,0,1,0,1,1,2,2,2,3,4,4,4,5,5,12]
     
     Y_train_nonzeros = []
     Y_alt = []
     B = np.mat(np.zeros((len(label_row_inds), rowstimescols)))
     for ind in range(len(label_row_inds)):
         i, j = label_row_inds[ind], label_col_inds[ind]
         Y_train_nonzeros.append(Y_train[i, j])
         Y_alt.append(Y_train[i, j])
         #B[ind, i * columns + j] = 1.
         B[ind, j * rows + i] = 1.
     #print B
     Y_train_nonzeros = np.mat(Y_train_nonzeros).T
     #Y_train_nonzeros = B * Y_train.reshape(rowstimescols, 1)
     
     #Train linear Kronecker RLS
     params = {}
     params["regparam"] = regparam
     params["xmatrix1"] = X_train1
     params["xmatrix2"] = X_train2
     params["train_labels"] = Y_train_nonzeros
     params["label_row_inds"] = label_row_inds
     params["label_col_inds"] = label_col_inds
     linear_kron_learner = CGKronRLS.createLearner(**params)
     linear_kron_learner.train()
     linear_kron_model = linear_kron_learner.getModel()
     linear_kron_testpred = linear_kron_model.predictWithDataMatricesAlt(X_test1, X_test2).reshape(X_test1.shape[0], X_test2.shape[0], order = 'F')
     
     #Train kernel Kronecker RLS
     params = {}
     params["regparam"] = regparam
     params["kmatrix1"] = K_train1
     params["kmatrix2"] = K_train2
     params["train_labels"] = Y_train_nonzeros
     params["label_row_inds"] = label_row_inds
     params["label_col_inds"] = label_col_inds
     kernel_kron_learner = CGKronRLS.createLearner(**params)
     kernel_kron_learner.train()
     kernel_kron_model = kernel_kron_learner.getModel()
     kernel_kron_testpred = kernel_kron_model.predictWithKernelMatrices(K_test1, K_test2)
     
     #Train an ordinary RLS regressor for reference
     K_Kron_train_x = np.kron(K_train2, K_train1)
     params = {}
     params["kmatrix"] = B * K_Kron_train_x * B.T
     params["train_labels"] = Y_train_nonzeros#B*(B.T * Y_train_nonzeros).reshape(rows, columns).reshape(rowstimescols, 1) # #Y_train.reshape(rowstimescols, 1)
     ordrls_learner = RLS.createLearner(**params)
     ordrls_learner.solve(regparam)
     ordrls_model = ordrls_learner.getModel()
     K_Kron_test_x = np.kron(K_test2, K_test1) * B.T
     ordrls_testpred = ordrls_model.predict(K_Kron_test_x)
     ordrls_testpred = ordrls_testpred.reshape(Y_test.shape[0], Y_test.shape[1], order = 'F')
     
     print linear_kron_testpred[0, 0], kernel_kron_testpred[0, 0], ordrls_testpred[0, 0]
     print linear_kron_testpred[0, 1], kernel_kron_testpred[0, 1], ordrls_testpred[0, 1]
     print linear_kron_testpred[1, 0], kernel_kron_testpred[1, 0], ordrls_testpred[1, 0]
     print np.mean(np.abs(linear_kron_testpred - ordrls_testpred)), np.mean(np.abs(kernel_kron_testpred - ordrls_testpred))
Example #8
    def test_cg_kron_rls(self):

        regparam = 0.0001

        K_train1, K_train2, Y_train, K_test1, K_test2, Y_test, X_train1, X_train2, X_test1, X_test2 = self.generate_xortask(
        )
        rows, columns = Y_train.shape
        print K_train1.shape, K_train2.shape, K_test1.shape, K_test2.shape, rows, columns
        rowstimescols = rows * columns
        indmatrix = np.mat(range(rowstimescols)).T.reshape(rows, columns)

        #label_row_inds = [2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,0,0,1,1,2,2,2,2,3,4,5,6,6,7,9]
        #label_col_inds = [2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,0,1,0,1,1,2,2,2,3,4,4,4,5,5,12]
        pointrange = np.arange(rows, columns)
        label_row_inds, label_col_inds = np.unravel_index(
            pointrange, (rows, columns))
        Y_train_nonzeros = []
        Y_alt = []
        B = np.mat(np.zeros((len(label_row_inds), rowstimescols)))
        for ind in range(len(label_row_inds)):
            i, j = label_row_inds[ind], label_col_inds[ind]
            Y_train_nonzeros.append(Y_train[i, j])
            Y_alt.append(Y_train[i, j])
            #B[ind, i * columns + j] = 1.
            B[ind, j * rows + i] = 1.
        #print B
        Y_train_nonzeros = np.mat(Y_train_nonzeros).T
        #Y_train_nonzeros = B * Y_train.reshape(rowstimescols, 1)

        #Train an ordinary RLS regressor for reference
        K_Kron_train_x = np.kron(K_train2, K_train1)
        params = {}
        params["kernel_matrix"] = B * K_Kron_train_x * B.T
        params["train_labels"] = Y_train_nonzeros  #B*(B.T * Y_train_nonzeros).reshape(rows, columns).reshape(rowstimescols, 1) # #Y_train.reshape(rowstimescols, 1)
        ordrls_learner = RLS.createLearner(**params)
        ordrls_learner.solve(regparam)
        ordrls_model = ordrls_learner.getModel()
        K_Kron_test_x = np.kron(K_test2, K_test1) * B.T
        ordrls_testpred = ordrls_model.predict(K_Kron_test_x)
        ordrls_testpred = ordrls_testpred.reshape(Y_test.shape[0],
                                                  Y_test.shape[1],
                                                  order='F')

        #Train linear Kronecker RLS
        class TestCallback():
            def __init__(self):
                self.round = 0

            def callback(self, learner):
                self.round = self.round + 1
                print self.round

            def finished(self, learner):
                print 'finished'

        params = {}
        params["regparam"] = regparam
        params["xmatrix1"] = X_train1
        params["xmatrix2"] = X_train2
        params["train_labels"] = Y_train_nonzeros
        params["label_row_inds"] = label_row_inds
        params["label_col_inds"] = label_col_inds
        tcb = TestCallback()
        params['callback'] = tcb
        linear_kron_learner = CGKronRLS.createLearner(**params)
        linear_kron_learner.train()
        linear_kron_model = linear_kron_learner.getModel()
        linear_kron_testpred = linear_kron_model.predictWithDataMatrices(
            X_test1, X_test2).reshape(X_test1.shape[0],
                                      X_test2.shape[0],
                                      order='F')

        #params["warm_start"] = linear_kron_learner.W
        #linear_kron_learner = CGKronRLS.createLearner(**params)
        #linear_kron_learner.train()
        #linear_kron_model = linear_kron_learner.getModel()
        #linear_kron_testpred = linear_kron_model.predictWithDataMatricesAlt(X_test1, X_test2).reshape(X_test1.shape[0], X_test2.shape[0], order = 'F')

        #Train kernel Kronecker RLS
        params = {}
        params["regparam"] = regparam
        params["kmatrix1"] = K_train1
        params["kmatrix2"] = K_train2
        params["train_labels"] = Y_train_nonzeros
        params["label_row_inds"] = label_row_inds
        params["label_col_inds"] = label_col_inds

        class KernelCallback():
            def __init__(self):
                self.round = 0

            def callback(self, learner):
                self.round = self.round + 1
                tp = KernelPairwiseModel(
                    learner.A, learner.label_row_inds,
                    learner.label_col_inds).predictWithKernelMatrices(
                        K_test1, K_test2)
                print self.round, np.mean(np.abs(tp - ordrls_testpred))

            def finished(self, learner):
                print 'finished'

        tcb = KernelCallback()
        params['callback'] = tcb
        kernel_kron_learner = CGKronRLS.createLearner(**params)
        kernel_kron_learner.train()
        kernel_kron_model = kernel_kron_learner.getModel()
        kernel_kron_testpred = kernel_kron_model.predictWithKernelMatrices(
            K_test1, K_test2)
        kernel_kron_testpred_alt = kernel_kron_model.predictWithKernelMatrices(
            K_test1, K_test2, row_inds=[0, 0, 1], col_inds=[0, 1, 0])

        print linear_kron_testpred[0, 0], kernel_kron_testpred[
            0, 0], kernel_kron_testpred_alt[0], ordrls_testpred[0, 0]
        print linear_kron_testpred[0, 1], kernel_kron_testpred[
            0, 1], kernel_kron_testpred_alt[1], ordrls_testpred[0, 1]
        print linear_kron_testpred[1, 0], kernel_kron_testpred[
            1, 0], kernel_kron_testpred_alt[2], ordrls_testpred[1, 0]
        print np.mean(np.abs(linear_kron_testpred - ordrls_testpred)), np.mean(
            np.abs(kernel_kron_testpred - ordrls_testpred))
Example #9
    def test_cg_kron_rls(self):

        regparam = 0.0001

        K_train1, K_train2, Y_train, K_test1, K_test2, Y_test, X_train1, X_train2, X_test1, X_test2 = self.generate_xortask(
        )
        Y_train = Y_train.ravel(order='F')
        Y_test = Y_test.ravel(order='F')
        train_rows, train_columns = K_train1.shape[0], K_train2.shape[0]
        test_rows, test_columns = K_test1.shape[0], K_test2.shape[0]
        rowstimescols = train_rows * train_columns
        allindices = np.arange(rowstimescols)
        all_label_row_inds, all_label_col_inds = np.unravel_index(
            allindices, (train_rows, train_columns), order='F')
        incinds = pyrandom.sample(allindices, 50)
        label_row_inds, label_col_inds = all_label_row_inds[
            incinds], all_label_col_inds[incinds]
        Y_train_known_outputs = Y_train.reshape(rowstimescols,
                                                order='F')[incinds]

        #Train an ordinary RLS regressor for reference
        params = {}
        params["X"] = np.kron(K_train2, K_train1)[np.ix_(incinds, incinds)]
        params["kernel"] = "PrecomputedKernel"
        params["Y"] = Y_train_known_outputs
        params["regparam"] = regparam
        ordrls_learner = RLS(**params)
        ordrls_model = ordrls_learner.predictor
        K_Kron_test = np.kron(K_test2, K_test1)[:, incinds]
        ordrls_testpred = ordrls_model.predict(K_Kron_test)
        ordrls_testpred = ordrls_testpred.reshape((test_rows, test_columns),
                                                  order='F')

        #Train linear Kronecker RLS
        class TestCallback():
            def __init__(self):
                self.round = 0

            def callback(self, learner):
                self.round = self.round + 1
                tp = LinearPairwisePredictor(learner.W).predict(
                    X_test1, X_test2)
                print(
                    str(self.round) + ' ' +
                    str(np.mean(np.abs(tp -
                                       ordrls_testpred.ravel(order='F')))))

            def finished(self, learner):
                print('finished')

        params = {}
        params["regparam"] = regparam
        params["X1"] = X_train1
        params["X2"] = X_train2
        params["Y"] = Y_train_known_outputs
        params["label_row_inds"] = label_row_inds
        params["label_col_inds"] = label_col_inds
        tcb = TestCallback()
        params['callback'] = tcb
        linear_kron_learner = CGKronRLS(**params)
        linear_kron_testpred = linear_kron_learner.predict(
            X_test1, X_test2).reshape((test_rows, test_columns), order='F')
        linear_kron_testpred_alt = linear_kron_learner.predict(
            X_test1, X_test2, [0, 0, 1], [0, 1, 0])

        #Train kernel Kronecker RLS
        params = {}
        params["regparam"] = regparam
        params["K1"] = K_train1
        params["K2"] = K_train2
        params["Y"] = Y_train_known_outputs
        params["label_row_inds"] = label_row_inds
        params["label_col_inds"] = label_col_inds

        class KernelCallback():
            def __init__(self):
                self.round = 0

            def callback(self, learner):
                self.round = self.round + 1
                tp = KernelPairwisePredictor(learner.A, learner.input1_inds,
                                             learner.input2_inds).predict(
                                                 K_test1, K_test2)
                print(
                    str(self.round) + ' ' +
                    str(np.mean(np.abs(tp -
                                       ordrls_testpred.ravel(order='F')))))

            def finished(self, learner):
                print('finished')

        tcb = KernelCallback()
        params['callback'] = tcb
        kernel_kron_learner = CGKronRLS(**params)
        kernel_kron_testpred = kernel_kron_learner.predict(
            K_test1, K_test2).reshape((test_rows, test_columns), order='F')
        kernel_kron_testpred_alt = kernel_kron_learner.predict(
            K_test1, K_test2, [0, 0, 1], [0, 1, 0])

        print('Predictions: Linear CgKronRLS, Kernel CgKronRLS, ordinary RLS')
        print('[0, 0]: ' + str(linear_kron_testpred[0, 0]) + ' ' +
              str(kernel_kron_testpred[0, 0]) + ' ' +
              str(ordrls_testpred[0, 0])
              )  #, linear_kron_testpred_alt[0], kernel_kron_testpred_alt[0]
        print('[0, 1]: ' + str(linear_kron_testpred[0, 1]) + ' ' +
              str(kernel_kron_testpred[0, 1]) + ' ' +
              str(ordrls_testpred[0, 1])
              )  #, linear_kron_testpred_alt[1], kernel_kron_testpred_alt[1]
        print('[1, 0]: ' + str(linear_kron_testpred[1, 0]) + ' ' +
              str(kernel_kron_testpred[1, 0]) + ' ' +
              str(ordrls_testpred[1, 0])
              )  #, linear_kron_testpred_alt[2], kernel_kron_testpred_alt[2]
        print(
            'Meanabsdiff: linear KronRLS - ordinary RLS, kernel KronRLS - ordinary RLS'
        )
        print(
            str(np.mean(np.abs(linear_kron_testpred - ordrls_testpred))) +
            ' ' + str(np.mean(np.abs(kernel_kron_testpred - ordrls_testpred))))
        np.testing.assert_almost_equal(linear_kron_testpred,
                                       ordrls_testpred,
                                       decimal=5)
        np.testing.assert_almost_equal(kernel_kron_testpred,
                                       ordrls_testpred,
                                       decimal=5)
Example #10
 def test_cg_kron_rls(self):
     
     regparam = 0.0001
     
     K_train1, K_train2, Y_train, K_test1, K_test2, Y_test, X_train1, X_train2, X_test1, X_test2 = self.generate_xortask()
     Y_train = Y_train.ravel(order = 'F')
     Y_test = Y_test.ravel(order = 'F')
     train_rows, train_columns = K_train1.shape[0], K_train2.shape[0]
     test_rows, test_columns = K_test1.shape[0], K_test2.shape[0]
     rowstimescols = train_rows * train_columns
     allindices = np.arange(rowstimescols)
     all_label_row_inds, all_label_col_inds = np.unravel_index(allindices, (train_rows, train_columns), order = 'F') 
     incinds = pyrandom.sample(allindices, 50)
     label_row_inds, label_col_inds = all_label_row_inds[incinds], all_label_col_inds[incinds] 
     Y_train_known_outputs = Y_train.reshape(rowstimescols, order = 'F')[incinds]
     
     #Train an ordinary RLS regressor for reference
     params = {}
     params["X"] = np.kron(K_train2, K_train1)[np.ix_(incinds, incinds)]
     params["kernel"] = "PrecomputedKernel"
     params["Y"] = Y_train_known_outputs
     params["regparam"] = regparam
     ordrls_learner = RLS(**params)
     ordrls_model = ordrls_learner.predictor
     K_Kron_test = np.kron(K_test2, K_test1)[:, incinds]
     ordrls_testpred = ordrls_model.predict(K_Kron_test)
     ordrls_testpred = ordrls_testpred.reshape((test_rows, test_columns), order = 'F')
     
     #Train linear Kronecker RLS
     class TestCallback():
         def __init__(self):
             self.round = 0
         def callback(self, learner):
             self.round = self.round + 1
             tp = LinearPairwisePredictor(learner.W).predict(X_test1, X_test2)
             print(str(self.round) + ' ' + str(np.mean(np.abs(tp - ordrls_testpred.ravel(order = 'F')))))
         def finished(self, learner):
             print('finished')
     params = {}
     params["regparam"] = regparam
     params["X1"] = X_train1
     params["X2"] = X_train2
     params["Y"] = Y_train_known_outputs
     params["label_row_inds"] = label_row_inds
     params["label_col_inds"] = label_col_inds
     tcb = TestCallback()
     params['callback'] = tcb
     linear_kron_learner = CGKronRLS(**params)
     linear_kron_testpred = linear_kron_learner.predict(X_test1, X_test2).reshape((test_rows, test_columns), order = 'F')
     linear_kron_testpred_alt = linear_kron_learner.predict(X_test1, X_test2, [0, 0, 1], [0, 1, 0])
     
     #Train kernel Kronecker RLS
     params = {}
     params["regparam"] = regparam
     params["K1"] = K_train1
     params["K2"] = K_train2
     params["Y"] = Y_train_known_outputs
     params["label_row_inds"] = label_row_inds
     params["label_col_inds"] = label_col_inds
     class KernelCallback():
         def __init__(self):
             self.round = 0
         def callback(self, learner):
             self.round = self.round + 1
             tp = KernelPairwisePredictor(learner.A, learner.input1_inds, learner.input2_inds).predict(K_test1, K_test2)
             print(str(self.round) + ' ' + str(np.mean(np.abs(tp - ordrls_testpred.ravel(order = 'F')))))
         def finished(self, learner):
             print('finished')
     tcb = KernelCallback()
     params['callback'] = tcb
     kernel_kron_learner = CGKronRLS(**params)
     kernel_kron_testpred = kernel_kron_learner.predict(K_test1, K_test2).reshape((test_rows, test_columns), order = 'F')
     kernel_kron_testpred_alt = kernel_kron_learner.predict(K_test1, K_test2, [0, 0, 1], [0, 1, 0])
     
     print('Predictions: Linear CgKronRLS, Kernel CgKronRLS, ordinary RLS')
     print('[0, 0]: ' + str(linear_kron_testpred[0, 0]) + ' ' + str(kernel_kron_testpred[0, 0]) + ' ' + str(ordrls_testpred[0, 0]))#, linear_kron_testpred_alt[0], kernel_kron_testpred_alt[0]
     print('[0, 1]: ' + str(linear_kron_testpred[0, 1]) + ' ' + str(kernel_kron_testpred[0, 1]) + ' ' + str(ordrls_testpred[0, 1]))#, linear_kron_testpred_alt[1], kernel_kron_testpred_alt[1]
     print('[1, 0]: ' + str(linear_kron_testpred[1, 0]) + ' ' + str(kernel_kron_testpred[1, 0]) + ' ' + str(ordrls_testpred[1, 0]))#, linear_kron_testpred_alt[2], kernel_kron_testpred_alt[2]
     print('Meanabsdiff: linear KronRLS - ordinary RLS, kernel KronRLS - ordinary RLS')
     print(str(np.mean(np.abs(linear_kron_testpred - ordrls_testpred))) + ' ' + str(np.mean(np.abs(kernel_kron_testpred - ordrls_testpred))))
     np.testing.assert_almost_equal(linear_kron_testpred, ordrls_testpred, decimal=5)
     np.testing.assert_almost_equal(kernel_kron_testpred, ordrls_testpred, decimal=5)
Example #11
 def test_cg_kron_rls(self):
     
     
     regparam = 0.0001
     
     K_train1, K_train2, Y_train, K_test1, K_test2, Y_test, X_train1, X_train2, X_test1, X_test2 = self.generate_xortask()
     rows, columns = Y_train.shape
     print K_train1.shape, K_train2.shape, K_test1.shape, K_test2.shape, rows, columns
     rowstimescols = rows * columns
     indmatrix = np.mat(range(rowstimescols)).T.reshape(rows, columns)
     
     #label_row_inds = [2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,0,0,1,1,2,2,2,2,3,4,5,6,6,7,9]
     #label_col_inds = [2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,0,1,0,1,1,2,2,2,3,4,4,4,5,5,12]
     pointrange = np.arange(rows, columns)
     label_row_inds, label_col_inds = np.unravel_index(pointrange, (rows, columns)) 
     Y_train_nonzeros = []
     Y_alt = []
     B = np.mat(np.zeros((len(label_row_inds), rowstimescols)))
     for ind in range(len(label_row_inds)):
         i, j = label_row_inds[ind], label_col_inds[ind]
         Y_train_nonzeros.append(Y_train[i, j])
         Y_alt.append(Y_train[i, j])
         #B[ind, i * columns + j] = 1.
         B[ind, j * rows + i] = 1.
     #print B
     Y_train_nonzeros = np.mat(Y_train_nonzeros).T
     #Y_train_nonzeros = B * Y_train.reshape(rowstimescols, 1)
     
     #Train an ordinary RLS regressor for reference
     K_Kron_train_x = np.kron(K_train2, K_train1)
     params = {}
     params["kernel_matrix"] = B * K_Kron_train_x * B.T
     params["train_labels"] = Y_train_nonzeros#B*(B.T * Y_train_nonzeros).reshape(rows, columns).reshape(rowstimescols, 1) # #Y_train.reshape(rowstimescols, 1)
     ordrls_learner = RLS.createLearner(**params)
     ordrls_learner.solve(regparam)
     ordrls_model = ordrls_learner.getModel()
     K_Kron_test_x = np.kron(K_test2, K_test1) * B.T
     ordrls_testpred = ordrls_model.predict(K_Kron_test_x)
     ordrls_testpred = ordrls_testpred.reshape(Y_test.shape[0], Y_test.shape[1], order = 'F')
     
     #Train linear Kronecker RLS
     class TestCallback():
         def __init__(self):
             self.round = 0
         def callback(self, learner):
             self.round = self.round + 1
             print self.round
         def finished(self, learner):
             print 'finished'
     params = {}
     params["regparam"] = regparam
     params["xmatrix1"] = X_train1
     params["xmatrix2"] = X_train2
     params["train_labels"] = Y_train_nonzeros
     params["label_row_inds"] = label_row_inds
     params["label_col_inds"] = label_col_inds
     tcb = TestCallback()
     params['callback'] = tcb
     linear_kron_learner = CGKronRLS.createLearner(**params)
     linear_kron_learner.train()
     linear_kron_model = linear_kron_learner.getModel()
     linear_kron_testpred = linear_kron_model.predictWithDataMatrices(X_test1, X_test2).reshape(X_test1.shape[0], X_test2.shape[0], order = 'F')
     
     #params["warm_start"] = linear_kron_learner.W
     #linear_kron_learner = CGKronRLS.createLearner(**params)
     #linear_kron_learner.train()
     #linear_kron_model = linear_kron_learner.getModel()
     #linear_kron_testpred = linear_kron_model.predictWithDataMatricesAlt(X_test1, X_test2).reshape(X_test1.shape[0], X_test2.shape[0], order = 'F')
     
     #Train kernel Kronecker RLS
     params = {}
     params["regparam"] = regparam
     params["kmatrix1"] = K_train1
     params["kmatrix2"] = K_train2
     params["train_labels"] = Y_train_nonzeros
     params["label_row_inds"] = label_row_inds
     params["label_col_inds"] = label_col_inds
     class KernelCallback():
         def __init__(self):
             self.round = 0
         def callback(self, learner):
             self.round = self.round + 1
             tp = KernelPairwiseModel(learner.A, learner.label_row_inds, learner.label_col_inds).predictWithKernelMatrices(K_test1, K_test2)
             print self.round, np.mean(np.abs(tp - ordrls_testpred))
         def finished(self, learner):
             print 'finished'
     tcb = KernelCallback()
     params['callback'] = tcb
     kernel_kron_learner = CGKronRLS.createLearner(**params)
     kernel_kron_learner.train()
     kernel_kron_model = kernel_kron_learner.getModel()
     kernel_kron_testpred = kernel_kron_model.predictWithKernelMatrices(K_test1, K_test2)
     
     print linear_kron_testpred[0, 0], kernel_kron_testpred[0, 0], ordrls_testpred[0, 0]
     print linear_kron_testpred[0, 1], kernel_kron_testpred[0, 1], ordrls_testpred[0, 1]
     print linear_kron_testpred[1, 0], kernel_kron_testpred[1, 0], ordrls_testpred[1, 0]
     print np.mean(np.abs(linear_kron_testpred - ordrls_testpred)), np.mean(np.abs(kernel_kron_testpred - ordrls_testpred))