示例#1
0
def plss(X, y, cv, n_components=1):
    """Cross-validated sum of squared errors for a PLS regression.

    Parameters
    ----------
    X : ndarray of shape (n_samples, n_features)
        Predictor matrix.
    y : ndarray of shape (n_samples, n_targets)
        Response matrix. Must be 2-D: the SSE accumulator is sized from
        ``y.shape[1]``.
    cv : iterable of (train, test) index-array pairs
        Cross-validation splits; each pair indexes rows of ``X`` and ``y``.
    n_components : int, default=1
        Number of PLS components to fit.

    Returns
    -------
    sse : ndarray of shape (n_targets,)
        Per-target sum of squared prediction errors, accumulated over
        all folds.
    """
    pls = PLSRegression(n_components=n_components)
    sse = np.zeros(y.shape[1])
    for train, test in cv:
        X_train, X_test = X[train], X[test]
        y_train, y_test = y[train], y[test]
        # Center each fold on its own *training* means so that no test
        # information leaks into the fit; predictions are un-centered by
        # adding y0 back implicitly (we subtract y0 from y_test instead).
        y0 = y_train.mean(0)
        X0 = X_train.mean(0)
        pls.fit(X_train - X0, y_train - y0)
        sse += np.sum((y_test - y0 - pls.predict(X_test - X0)) ** 2, 0)
    return sse
示例#2
0
 def dict2mean(X, dict):
     """Rebuild a PLSRegression predictor from stored parameters and
     return its predictions for ``X``.

     NOTE(review): the ``dict`` parameter shadows the builtin ``dict``;
     kept as-is so keyword callers are not broken. Expected keys:
     'coefs', 'x_mean', 'y_mean' (as saved from a fitted model).
     """
     n_comp = np.shape(dict['coefs'])[0]
     model = PLSRegression(n_components=n_comp)
     # Restore the fitted state directly onto the estimator.
     model.x_mean_ = dict['x_mean']
     model.y_mean_ = dict['y_mean']
     model.coefs = dict['coefs']
     return model.predict(X)
示例#3
0
    np.corrcoef(Y_test_r[:, 0], Y_test_r[:, 1])[0, 1])
pl.legend()
pl.show()

###############################################################################
# PLS regression, with multivariate response, a.k.a. PLS2

# Simulate a multivariate linear model Y = X B + noise in which only the
# first two predictors carry signal.
n = 1000
q = 3
p = 10
X = np.random.normal(size=n * p).reshape((n, p))
# True coefficient matrix (p x q): every response column depends on
# X1 with weight 1 and X2 with weight 2; all other weights are zero.
B = np.array([[1, 2] + [0] * (p - 2)] * q).T
# each Yj = 1*X1 + 2*X2 + noise (plus a constant offset of 5)
Y = np.dot(X, B) + np.random.normal(size=n * q).reshape((n, q)) + 5

pls2 = PLSRegression(n_components=3)
pls2.fit(X, Y)
print "True B (such that: Y = XB + Err)"
print B
# compare pls2.coefs with B
print "Estimated B"
print np.round(pls2.coefs, 1)
pls2.predict(X)

###############################################################################
# PLS regression, with univariate response, a.k.a. PLS1

# Same design as above, now with a single scalar response.
n = 1000
p = 10
X = np.random.normal(size=n * p).reshape((n, p))
y = X[:, 0] + 2 * X[:, 1] + np.random.normal(size=n * 1) + 5
示例#4
0
# Accumulators for per-component-count results (fMRI-only, MEG-only,
# PCA baseline, and two PLS variants).
yf = []
ym = []
ypca = []
yplsm = []
yplsf = []
# Column-standardize each modality after concatenating its runs.
X_fmri = scale(np.concatenate(good_data['fmri'], axis=1))
X_meg = scale(np.concatenate(good_data['meg'], axis=1))
for ncomp in max_comps:
    # Per-fold MSE lists for this component count.
    mse_fmri = []
    mse_meg = []
    mse_pca = []
    mse_plsm = []
    mse_plsf = []

    print 'Trying %d components' % ncomp
    plsca = PLSRegression(n_components=ncomp)
    # Baseline that always predicts the training-set mean.
    dumb = DummyRegressor(strategy='mean')

    for oidx, (train, test) in enumerate(cv):
        X_fmri_train = X_fmri[train]
        X_fmri_test = X_fmri[test]
        X_meg_train = X_meg[train]
        X_meg_test = X_meg[test]
        y_train = sx.iloc[train].tolist()
        y_test = sx.iloc[test].tolist()

        pca = RandomizedPCA(n_components=ncomp, whiten=True)

        # PCA-reduce the fMRI features, then fit a linear model on them;
        # score by MSE on the held-out fold.
        clf = LinearRegression().fit(pca.fit_transform(X_fmri_train), y_train)
        mse_fmri.append(mean_squared_error(clf.predict(pca.transform(X_fmri_test)), y_test))
示例#5
0
from dataset_creator import DatasetCreator

params = {"LAMBDA": 0.4, "dimension": 4096}

c = DatasetCreator(dtk_params=params, encoder_params=[4096, 3])

# D[0]: input vectors, D[1]: target vectors (per DatasetCreator.get_d()
# -- TODO confirm against that class).
D = c.get_d()

n = len(D[0])

print(D[1])

# Split the data 50/50 into train and test halves.
# BUG FIX: use floor division -- under Python 3 `n / 2` is a float and
# slicing with a float index raises TypeError.
train_X, train_Y = D[0][:n // 2], D[1][:n // 2]
test_X, test_Y = D[0][n // 2:], D[1][n // 2:]

pls2 = PLSRegression()
pls2.fit(train_X, train_Y)

pred = pls2.predict(test_X)

# Mean squared error over all test targets and output dimensions.
mean_err = np.mean((pred - test_Y)**2)

print(mean_err)

mean_cos = 0
mean_cos_original = 0

# Accumulate cosine similarity between each prediction and its target.
for i, j in zip(pred, test_Y):
    mean_cos = mean_cos + np.dot(i, j) / np.sqrt(np.dot(i, i) * np.dot(j, j))
示例#6
0
def pls(coords, intens):
	print PLSRegression().fit(coords, intens)