def single_holdout(XD, XT, Y, label_row_inds, label_col_inds, train_sets, val_sets, measure, regparam, rls,
                   maxiter=50, inneriter=100, incindices = None):
    """Train on the first train/validation split and record the validation
    performance after every outer training iteration.

    Parameters
    ----------
    XD, XT : data matrices for the two domains of the pairwise task.
    Y : label vector aligned with (label_row_inds, label_col_inds).
    label_row_inds, label_col_inds : per-label row/column coordinates into
        the XD x XT interaction matrix.
    train_sets, val_sets : lists of index arrays into Y; only the first
        split (train_sets[0], val_sets[0]) is used by this function.
    measure : performance measure, called as measure(true, predicted).
    regparam : regularization parameter passed to solve_linear.
    rls : if truthy use CGKronRLS, otherwise KronSVM.
    maxiter : number of outer iterations (one callback fires per iteration).
    inneriter : number of inner iterations per outer iteration.
    incindices : unused; kept for signature compatibility with the
        companion cross-validation routines.

    Returns
    -------
    list of validation-set performances, one entry per callback invocation.
    """
    trainindices = train_sets[0]
    valindices = val_sets[0]

    class TestCallback(CF):
        # Records the validation-set performance after each outer iteration.
        def __init__(self):
            self.results = []
        def callback(self, learner):
            # Predict only the held-out cells of the interaction matrix.
            P = np.mat(learner.getModel().predictWithDataMatricesAlt(XD, XT, label_row_inds[valindices], label_col_inds[valindices])).T
            self.results.append(measure(Y[valindices], P))
        def get_results(self):
            return self.results

    params = {}
    params["xmatrix1"] = XD
    params["xmatrix2"] = XT
    params["train_labels"] = Y[trainindices]
    params["label_row_inds"] = label_row_inds[trainindices]
    params["label_col_inds"] = label_col_inds[trainindices]
    params["maxiter"] = maxiter
    params["inneriter"] = inneriter
    callback = TestCallback()
    params['callback'] = callback
    if rls:
        learner = CGKronRLS.createLearner(**params)
    else:
        learner = KronSVM.createLearner(**params)
    learner.solve_linear(regparam)
    return callback.get_results()
def general_nfold_cv_no_imputation(XD, XT, Y, label_row_inds, label_col_inds, measure, train_sets, val_sets, regparam, rls, incindices = None):

    cvrounds = len(train_sets)
    maxiter = 10
    all_predictions = np.zeros((maxiter, Y.shape[0]))
    print 'general nfold.'
    for foldind in range(cvrounds):
        trainindices = train_sets[foldind]
        valindices = val_sets[foldind]
        class TestCallback(CF):
            def __init__(self):
                self.iter = 0
            def callback(self, learner):
                all_predictions[self.iter][valindices] = np.mat(learner.getModel().predictWithDataMatricesAlt(XD, XT, label_row_inds[valindices], label_col_inds[valindices])).T
                self.iter += 1
        params = {}
        params["xmatrix1"] = XD
        params["xmatrix2"] = XT
        params["train_labels"] = Y[trainindices]
        params["label_row_inds"] = label_row_inds[trainindices]
        params["label_col_inds"] = label_col_inds[trainindices]
        params["maxiter"] = maxiter
        params['callback'] = TestCallback()
        if rls:
            learner = CGKronRLS.createLearner(**params)
        else:
            learner = KronSVM.createLearner(**params)
        #regparam = 2. ** (15)
        learner.solve_linear(regparam)
        print foldind, 'done'
    print
    bestperf = -float('Inf')
    bestparam = None
    for iterind in range(maxiter):
        if incindices == None:
            perf = measure(Y, all_predictions[iterind])
        else:
            perf = measure(Y[incindices], all_predictions[iterind][incindices])
        if perf > bestperf:
            bestperf = perf
            bestparam = iterind
        print iterind, perf
    return bestparam, bestperf, all_predictions
def artificial_data_experiment():
    """Benchmark CGKronRLS on randomly generated pairwise data.

    Builds two random feature matrices (m1 x d and m2 x d), samples l
    labeled cells from the m1 x m2 interaction matrix, and trains a
    linear CGKronRLS model on them.  The (arbitrary) label of each cell
    is its own flat index, cast to float.
    """
    maxiter = 1
    m1 = 1000
    m2 = 10000  # append a trailing 0 for the 100k large-scale variant
    mm = m1 * m2
    d = 100
    l = 10000  # append a trailing 0 for the 100k large-scale variant
    params = {}
    params["xmatrix1"] = np.random.rand(m1, d)
    params["xmatrix2"] = np.random.rand(m2, d)
    # Sample l labeled cells (flat indices) from the interaction matrix.
    labelinds = pyrandom.sample(range(mm), l)
    # Unravel only the sampled flat indices; the original code unraveled
    # all mm indices first and then selected, wasting O(mm) memory.
    label_row_inds, label_col_inds = np.unravel_index(labelinds, (m1, m2))
    params["train_labels"] = np.mat(labelinds, dtype=np.float64).T
    params["label_row_inds"] = label_row_inds
    params["label_col_inds"] = label_col_inds
    params["maxiter"] = maxiter
    learner = CGKronRLS.createLearner(**params)
    regparam = 2. ** (20)
    learner.solve_linear(regparam)
Example #4
0
    def test_cg_kron_rls(self):
        """Compare three solvers of the same pairwise regression problem:

        1. linear CGKronRLS trained on the raw data matrices,
        2. kernel CGKronRLS trained on the per-domain kernel matrices,
        3. an ordinary RLS regressor trained on the explicit Kronecker
           product kernel, restricted to the labeled cells via B.

        Results are printed rather than asserted: three sample test
        predictions from each model, then the mean absolute differences
        against the ordinary-RLS reference (expected to be small).
        """

        regparam = 0.0001

        # generate_xortask is defined elsewhere in this class; presumably it
        # builds an XOR-style pairwise learning task with train/test kernels,
        # labels and data matrices -- TODO confirm.
        K_train1, K_train2, Y_train, K_test1, K_test2, Y_test, X_train1, X_train2, X_test1, X_test2 = self.generate_xortask(
        )
        rows, columns = Y_train.shape
        print K_train1.shape, K_train2.shape, K_test1.shape, K_test2.shape, rows, columns
        rowstimescols = rows * columns
        indmatrix = np.mat(range(rowstimescols)).T.reshape(rows, columns)

        # Hand-picked (row, col) coordinates of the labeled cells; the heavy
        # repetition of (2, 2) is intentional -- the same cell may carry
        # several label entries.
        label_row_inds = [
            2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
            2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 0, 0, 1, 1, 2, 2, 2,
            2, 3, 4, 5, 6, 6, 7, 9
        ]
        label_col_inds = [
            2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2,
            2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 0, 1, 0, 1, 1, 2, 2,
            2, 3, 4, 4, 4, 5, 5, 12
        ]

        Y_train_nonzeros = []
        Y_alt = []
        # B: 0/1 incidence matrix (one row per label) that selects the
        # labeled entries from the column-major vectorization of the label
        # matrix -- note the j * rows + i flattening below.
        B = np.mat(np.zeros((len(label_row_inds), rowstimescols)))
        for ind in range(len(label_row_inds)):
            i, j = label_row_inds[ind], label_col_inds[ind]
            Y_train_nonzeros.append(Y_train[i, j])
            Y_alt.append(Y_train[i, j])
            #B[ind, i * columns + j] = 1.
            B[ind, j * rows + i] = 1.
        #print B
        Y_train_nonzeros = np.mat(Y_train_nonzeros).T
        #Y_train_nonzeros = B * Y_train.reshape(rowstimescols, 1)

        #Train linear Kronecker RLS
        params = {}
        params["regparam"] = regparam
        params["xmatrix1"] = X_train1
        params["xmatrix2"] = X_train2
        params["train_labels"] = Y_train_nonzeros
        params["label_row_inds"] = label_row_inds
        params["label_col_inds"] = label_col_inds
        linear_kron_learner = CGKronRLS.createLearner(**params)
        linear_kron_learner.train()
        linear_kron_model = linear_kron_learner.getModel()
        # Fortran order matches the column-major flattening used for B.
        linear_kron_testpred = linear_kron_model.predictWithDataMatricesAlt(
            X_test1, X_test2).reshape(X_test1.shape[0],
                                      X_test2.shape[0],
                                      order='F')

        #Train kernel Kronecker RLS
        params = {}
        params["regparam"] = regparam
        params["kmatrix1"] = K_train1
        params["kmatrix2"] = K_train2
        params["train_labels"] = Y_train_nonzeros
        params["label_row_inds"] = label_row_inds
        params["label_col_inds"] = label_col_inds
        kernel_kron_learner = CGKronRLS.createLearner(**params)
        kernel_kron_learner.train()
        kernel_kron_model = kernel_kron_learner.getModel()
        kernel_kron_testpred = kernel_kron_model.predictWithKernelMatrices(
            K_test1, K_test2)

        #Train an ordinary RLS regressor for reference
        # kron(K2, K1) pairs with the column-major cell ordering used in B;
        # B * ... * B.T restricts the full pairwise kernel to labeled cells.
        K_Kron_train_x = np.kron(K_train2, K_train1)
        params = {}
        params["kmatrix"] = B * K_Kron_train_x * B.T
        params[
            "train_labels"] = Y_train_nonzeros  #B*(B.T * Y_train_nonzeros).reshape(rows, columns).reshape(rowstimescols, 1) # #Y_train.reshape(rowstimescols, 1)
        ordrls_learner = RLS.createLearner(**params)
        ordrls_learner.solve(regparam)
        ordrls_model = ordrls_learner.getModel()
        K_Kron_test_x = np.kron(K_test2, K_test1) * B.T
        ordrls_testpred = ordrls_model.predict(K_Kron_test_x)
        ordrls_testpred = ordrls_testpred.reshape(Y_test.shape[0],
                                                  Y_test.shape[1],
                                                  order='F')

        # Sample predictions side by side, then the mean absolute deviation
        # of each Kronecker model from the ordinary-RLS reference.
        print linear_kron_testpred[0, 0], kernel_kron_testpred[
            0, 0], ordrls_testpred[0, 0]
        print linear_kron_testpred[0, 1], kernel_kron_testpred[
            0, 1], ordrls_testpred[0, 1]
        print linear_kron_testpred[1, 0], kernel_kron_testpred[
            1, 0], ordrls_testpred[1, 0]
        print np.mean(np.abs(linear_kron_testpred - ordrls_testpred)), np.mean(
            np.abs(kernel_kron_testpred - ordrls_testpred))
Example #5
0
 def test_cg_kron_rls(self):
     """Compare three solvers of the same pairwise regression problem:

     1. linear CGKronRLS trained on the raw data matrices,
     2. kernel CGKronRLS trained on the per-domain kernel matrices,
     3. an ordinary RLS regressor trained on the explicit Kronecker
        product kernel, restricted to the labeled cells via B.

     Results are printed rather than asserted: three sample test
     predictions per model, then mean absolute differences against the
     ordinary-RLS reference (expected to be small).
     """

     regparam = 0.0001

     # generate_xortask is defined elsewhere in this class; presumably it
     # builds an XOR-style pairwise learning task -- TODO confirm.
     K_train1, K_train2, Y_train, K_test1, K_test2, Y_test, X_train1, X_train2, X_test1, X_test2 = self.generate_xortask()
     rows, columns = Y_train.shape
     print K_train1.shape, K_train2.shape, K_test1.shape, K_test2.shape, rows, columns
     rowstimescols = rows * columns
     indmatrix = np.mat(range(rowstimescols)).T.reshape(rows, columns)

     # Hand-picked (row, col) coordinates of the labeled cells; the heavy
     # repetition of (2, 2) is intentional -- one cell may carry many labels.
     label_row_inds = [2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,0,0,1,1,2,2,2,2,3,4,5,6,6,7,9]
     label_col_inds = [2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,0,1,0,1,1,2,2,2,3,4,4,4,5,5,12]

     Y_train_nonzeros = []
     Y_alt = []
     # B: 0/1 incidence matrix (one row per label) selecting the labeled
     # entries from the column-major vectorization of the label matrix
     # (note the j * rows + i flattening below).
     B = np.mat(np.zeros((len(label_row_inds), rowstimescols)))
     for ind in range(len(label_row_inds)):
         i, j = label_row_inds[ind], label_col_inds[ind]
         Y_train_nonzeros.append(Y_train[i, j])
         Y_alt.append(Y_train[i, j])
         #B[ind, i * columns + j] = 1.
         B[ind, j * rows + i] = 1.
     #print B
     Y_train_nonzeros = np.mat(Y_train_nonzeros).T
     #Y_train_nonzeros = B * Y_train.reshape(rowstimescols, 1)

     #Train linear Kronecker RLS
     params = {}
     params["regparam"] = regparam
     params["xmatrix1"] = X_train1
     params["xmatrix2"] = X_train2
     params["train_labels"] = Y_train_nonzeros
     params["label_row_inds"] = label_row_inds
     params["label_col_inds"] = label_col_inds
     linear_kron_learner = CGKronRLS.createLearner(**params)
     linear_kron_learner.train()
     linear_kron_model = linear_kron_learner.getModel()
     # Fortran order matches the column-major flattening used for B.
     linear_kron_testpred = linear_kron_model.predictWithDataMatricesAlt(X_test1, X_test2).reshape(X_test1.shape[0], X_test2.shape[0], order = 'F')

     #Train kernel Kronecker RLS
     params = {}
     params["regparam"] = regparam
     params["kmatrix1"] = K_train1
     params["kmatrix2"] = K_train2
     params["train_labels"] = Y_train_nonzeros
     params["label_row_inds"] = label_row_inds
     params["label_col_inds"] = label_col_inds
     kernel_kron_learner = CGKronRLS.createLearner(**params)
     kernel_kron_learner.train()
     kernel_kron_model = kernel_kron_learner.getModel()
     kernel_kron_testpred = kernel_kron_model.predictWithKernelMatrices(K_test1, K_test2)

     #Train an ordinary RLS regressor for reference
     # kron(K2, K1) pairs with the column-major cell ordering used in B;
     # B * ... * B.T restricts the full pairwise kernel to labeled cells.
     K_Kron_train_x = np.kron(K_train2, K_train1)
     params = {}
     params["kmatrix"] = B * K_Kron_train_x * B.T
     params["train_labels"] = Y_train_nonzeros#B*(B.T * Y_train_nonzeros).reshape(rows, columns).reshape(rowstimescols, 1) # #Y_train.reshape(rowstimescols, 1)
     ordrls_learner = RLS.createLearner(**params)
     ordrls_learner.solve(regparam)
     ordrls_model = ordrls_learner.getModel()
     K_Kron_test_x = np.kron(K_test2, K_test1) * B.T
     ordrls_testpred = ordrls_model.predict(K_Kron_test_x)
     ordrls_testpred = ordrls_testpred.reshape(Y_test.shape[0], Y_test.shape[1], order = 'F')

     # Sample predictions side by side, then mean absolute deviation of each
     # Kronecker model from the ordinary-RLS reference.
     print linear_kron_testpred[0, 0], kernel_kron_testpred[0, 0], ordrls_testpred[0, 0]
     print linear_kron_testpred[0, 1], kernel_kron_testpred[0, 1], ordrls_testpred[0, 1]
     print linear_kron_testpred[1, 0], kernel_kron_testpred[1, 0], ordrls_testpred[1, 0]
     print np.mean(np.abs(linear_kron_testpred - ordrls_testpred)), np.mean(np.abs(kernel_kron_testpred - ordrls_testpred))
Example #6
0
    def test_cg_kron_rls(self):
        """Compare linear and kernel CGKronRLS against an ordinary RLS
        regressor trained on the explicit Kronecker product kernel.

        Unlike the sibling variants, the labeled cells come from an
        np.arange/unravel_index construction instead of hand-picked
        lists, and per-iteration callbacks print training progress.
        Results are printed rather than asserted.
        """

        regparam = 0.0001

        # generate_xortask is defined elsewhere in this class; presumably it
        # builds an XOR-style pairwise learning task -- TODO confirm.
        K_train1, K_train2, Y_train, K_test1, K_test2, Y_test, X_train1, X_train2, X_test1, X_test2 = self.generate_xortask(
        )
        rows, columns = Y_train.shape
        print K_train1.shape, K_train2.shape, K_test1.shape, K_test2.shape, rows, columns
        rowstimescols = rows * columns
        indmatrix = np.mat(range(rowstimescols)).T.reshape(rows, columns)

        #label_row_inds = [2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,0,0,1,1,2,2,2,2,3,4,5,6,6,7,9]
        #label_col_inds = [2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,0,1,0,1,1,2,2,2,3,4,4,4,5,5,12]
        # NOTE(review): np.arange(rows, columns) yields only the flat indices
        # rows..columns-1, not every cell of the matrix; the commented-out
        # lists above suggest broader coverage was intended -- possibly
        # np.arange(rows * columns). Confirm before relying on this test.
        pointrange = np.arange(rows, columns)
        label_row_inds, label_col_inds = np.unravel_index(
            pointrange, (rows, columns))
        Y_train_nonzeros = []
        Y_alt = []
        # B: 0/1 incidence matrix (one row per label) selecting the labeled
        # entries from the column-major vectorization of the label matrix
        # (note the j * rows + i flattening below).
        B = np.mat(np.zeros((len(label_row_inds), rowstimescols)))
        for ind in range(len(label_row_inds)):
            i, j = label_row_inds[ind], label_col_inds[ind]
            Y_train_nonzeros.append(Y_train[i, j])
            Y_alt.append(Y_train[i, j])
            #B[ind, i * columns + j] = 1.
            B[ind, j * rows + i] = 1.
        #print B
        Y_train_nonzeros = np.mat(Y_train_nonzeros).T
        #Y_train_nonzeros = B * Y_train.reshape(rowstimescols, 1)

        #Train an ordinary RLS regressor for reference
        # kron(K2, K1) pairs with the column-major cell ordering used in B;
        # B * ... * B.T restricts the full pairwise kernel to labeled cells.
        K_Kron_train_x = np.kron(K_train2, K_train1)
        params = {}
        params["kernel_matrix"] = B * K_Kron_train_x * B.T
        params[
            "train_labels"] = Y_train_nonzeros  #B*(B.T * Y_train_nonzeros).reshape(rows, columns).reshape(rowstimescols, 1) # #Y_train.reshape(rowstimescols, 1)
        ordrls_learner = RLS.createLearner(**params)
        ordrls_learner.solve(regparam)
        ordrls_model = ordrls_learner.getModel()
        K_Kron_test_x = np.kron(K_test2, K_test1) * B.T
        ordrls_testpred = ordrls_model.predict(K_Kron_test_x)
        ordrls_testpred = ordrls_testpred.reshape(Y_test.shape[0],
                                                  Y_test.shape[1],
                                                  order='F')

        #Train linear Kronecker RLS
        class TestCallback():
            # Prints the outer-iteration counter as training progresses.
            def __init__(self):
                self.round = 0

            def callback(self, learner):
                self.round = self.round + 1
                print self.round

            def finished(self, learner):
                print 'finished'

        params = {}
        params["regparam"] = regparam
        params["xmatrix1"] = X_train1
        params["xmatrix2"] = X_train2
        params["train_labels"] = Y_train_nonzeros
        params["label_row_inds"] = label_row_inds
        params["label_col_inds"] = label_col_inds
        tcb = TestCallback()
        params['callback'] = tcb
        linear_kron_learner = CGKronRLS.createLearner(**params)
        linear_kron_learner.train()
        linear_kron_model = linear_kron_learner.getModel()
        # Fortran order matches the column-major flattening used for B.
        linear_kron_testpred = linear_kron_model.predictWithDataMatrices(
            X_test1, X_test2).reshape(X_test1.shape[0],
                                      X_test2.shape[0],
                                      order='F')

        #params["warm_start"] = linear_kron_learner.W
        #linear_kron_learner = CGKronRLS.createLearner(**params)
        #linear_kron_learner.train()
        #linear_kron_model = linear_kron_learner.getModel()
        #linear_kron_testpred = linear_kron_model.predictWithDataMatricesAlt(X_test1, X_test2).reshape(X_test1.shape[0], X_test2.shape[0], order = 'F')

        #Train kernel Kronecker RLS
        params = {}
        params["regparam"] = regparam
        params["kmatrix1"] = K_train1
        params["kmatrix2"] = K_train2
        params["train_labels"] = Y_train_nonzeros
        params["label_row_inds"] = label_row_inds
        params["label_col_inds"] = label_col_inds

        class KernelCallback():
            # After each iteration, prints the current mean absolute
            # deviation of the intermediate model from the RLS reference.
            def __init__(self):
                self.round = 0

            def callback(self, learner):
                self.round = self.round + 1
                tp = KernelPairwiseModel(
                    learner.A, learner.label_row_inds,
                    learner.label_col_inds).predictWithKernelMatrices(
                        K_test1, K_test2)
                print self.round, np.mean(np.abs(tp - ordrls_testpred))

            def finished(self, learner):
                print 'finished'

        tcb = KernelCallback()
        params['callback'] = tcb
        kernel_kron_learner = CGKronRLS.createLearner(**params)
        kernel_kron_learner.train()
        kernel_kron_model = kernel_kron_learner.getModel()
        kernel_kron_testpred = kernel_kron_model.predictWithKernelMatrices(
            K_test1, K_test2)
        # Spot-check the sparse-prediction API on three explicit cells.
        kernel_kron_testpred_alt = kernel_kron_model.predictWithKernelMatrices(
            K_test1, K_test2, row_inds=[0, 0, 1], col_inds=[0, 1, 0])

        # Sample predictions side by side, then mean absolute deviation of
        # each Kronecker model from the ordinary-RLS reference.
        print linear_kron_testpred[0, 0], kernel_kron_testpred[
            0, 0], kernel_kron_testpred_alt[0], ordrls_testpred[0, 0]
        print linear_kron_testpred[0, 1], kernel_kron_testpred[
            0, 1], kernel_kron_testpred_alt[1], ordrls_testpred[0, 1]
        print linear_kron_testpred[1, 0], kernel_kron_testpred[
            1, 0], kernel_kron_testpred_alt[2], ordrls_testpred[1, 0]
        print np.mean(np.abs(linear_kron_testpred - ordrls_testpred)), np.mean(
            np.abs(kernel_kron_testpred - ordrls_testpred))
Example #7
0
 def test_cg_kron_rls(self):
     """Compare linear and kernel CGKronRLS against an ordinary RLS
     regressor trained on the explicit Kronecker product kernel.

     The labeled cells come from an np.arange/unravel_index construction
     and per-iteration callbacks print training progress.  Results are
     printed rather than asserted.
     """

     regparam = 0.0001

     # generate_xortask is defined elsewhere in this class; presumably it
     # builds an XOR-style pairwise learning task -- TODO confirm.
     K_train1, K_train2, Y_train, K_test1, K_test2, Y_test, X_train1, X_train2, X_test1, X_test2 = self.generate_xortask()
     rows, columns = Y_train.shape
     print K_train1.shape, K_train2.shape, K_test1.shape, K_test2.shape, rows, columns
     rowstimescols = rows * columns
     indmatrix = np.mat(range(rowstimescols)).T.reshape(rows, columns)

     #label_row_inds = [2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,0,0,1,1,2,2,2,2,3,4,5,6,6,7,9]
     #label_col_inds = [2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,2,0,1,0,1,1,2,2,2,3,4,4,4,5,5,12]
     # NOTE(review): np.arange(rows, columns) yields only flat indices
     # rows..columns-1, not every cell; the commented-out lists above suggest
     # broader coverage was intended -- possibly np.arange(rows * columns).
     # Confirm before relying on this test.
     pointrange = np.arange(rows, columns)
     label_row_inds, label_col_inds = np.unravel_index(pointrange, (rows, columns)) 
     Y_train_nonzeros = []
     Y_alt = []
     # B: 0/1 incidence matrix (one row per label) selecting the labeled
     # entries from the column-major vectorization of the label matrix
     # (note the j * rows + i flattening below).
     B = np.mat(np.zeros((len(label_row_inds), rowstimescols)))
     for ind in range(len(label_row_inds)):
         i, j = label_row_inds[ind], label_col_inds[ind]
         Y_train_nonzeros.append(Y_train[i, j])
         Y_alt.append(Y_train[i, j])
         #B[ind, i * columns + j] = 1.
         B[ind, j * rows + i] = 1.
     #print B
     Y_train_nonzeros = np.mat(Y_train_nonzeros).T
     #Y_train_nonzeros = B * Y_train.reshape(rowstimescols, 1)

     #Train an ordinary RLS regressor for reference
     # kron(K2, K1) pairs with the column-major cell ordering used in B;
     # B * ... * B.T restricts the full pairwise kernel to labeled cells.
     K_Kron_train_x = np.kron(K_train2, K_train1)
     params = {}
     params["kernel_matrix"] = B * K_Kron_train_x * B.T
     params["train_labels"] = Y_train_nonzeros#B*(B.T * Y_train_nonzeros).reshape(rows, columns).reshape(rowstimescols, 1) # #Y_train.reshape(rowstimescols, 1)
     ordrls_learner = RLS.createLearner(**params)
     ordrls_learner.solve(regparam)
     ordrls_model = ordrls_learner.getModel()
     K_Kron_test_x = np.kron(K_test2, K_test1) * B.T
     ordrls_testpred = ordrls_model.predict(K_Kron_test_x)
     ordrls_testpred = ordrls_testpred.reshape(Y_test.shape[0], Y_test.shape[1], order = 'F')

     #Train linear Kronecker RLS
     class TestCallback():
         # Prints the outer-iteration counter as training progresses.
         def __init__(self):
             self.round = 0
         def callback(self, learner):
             self.round = self.round + 1
             print self.round
         def finished(self, learner):
             print 'finished'
     params = {}
     params["regparam"] = regparam
     params["xmatrix1"] = X_train1
     params["xmatrix2"] = X_train2
     params["train_labels"] = Y_train_nonzeros
     params["label_row_inds"] = label_row_inds
     params["label_col_inds"] = label_col_inds
     tcb = TestCallback()
     params['callback'] = tcb
     linear_kron_learner = CGKronRLS.createLearner(**params)
     linear_kron_learner.train()
     linear_kron_model = linear_kron_learner.getModel()
     # Fortran order matches the column-major flattening used for B.
     linear_kron_testpred = linear_kron_model.predictWithDataMatrices(X_test1, X_test2).reshape(X_test1.shape[0], X_test2.shape[0], order = 'F')

     #params["warm_start"] = linear_kron_learner.W
     #linear_kron_learner = CGKronRLS.createLearner(**params)
     #linear_kron_learner.train()
     #linear_kron_model = linear_kron_learner.getModel()
     #linear_kron_testpred = linear_kron_model.predictWithDataMatricesAlt(X_test1, X_test2).reshape(X_test1.shape[0], X_test2.shape[0], order = 'F')

     #Train kernel Kronecker RLS
     params = {}
     params["regparam"] = regparam
     params["kmatrix1"] = K_train1
     params["kmatrix2"] = K_train2
     params["train_labels"] = Y_train_nonzeros
     params["label_row_inds"] = label_row_inds
     params["label_col_inds"] = label_col_inds
     class KernelCallback():
         # After each iteration, prints the current mean absolute deviation
         # of the intermediate model from the RLS reference.
         def __init__(self):
             self.round = 0
         def callback(self, learner):
             self.round = self.round + 1
             tp = KernelPairwiseModel(learner.A, learner.label_row_inds, learner.label_col_inds).predictWithKernelMatrices(K_test1, K_test2)
             print self.round, np.mean(np.abs(tp - ordrls_testpred))
         def finished(self, learner):
             print 'finished'
     tcb = KernelCallback()
     params['callback'] = tcb
     kernel_kron_learner = CGKronRLS.createLearner(**params)
     kernel_kron_learner.train()
     kernel_kron_model = kernel_kron_learner.getModel()
     kernel_kron_testpred = kernel_kron_model.predictWithKernelMatrices(K_test1, K_test2)

     # Sample predictions side by side, then mean absolute deviation of each
     # Kronecker model from the ordinary-RLS reference.
     print linear_kron_testpred[0, 0], kernel_kron_testpred[0, 0], ordrls_testpred[0, 0]
     print linear_kron_testpred[0, 1], kernel_kron_testpred[0, 1], ordrls_testpred[0, 1]
     print linear_kron_testpred[1, 0], kernel_kron_testpred[1, 0], ordrls_testpred[1, 0]
     print np.mean(np.abs(linear_kron_testpred - ordrls_testpred)), np.mean(np.abs(kernel_kron_testpred - ordrls_testpred))