Example #1
0
def fit_pls(X, Y, n_components, scale=True, algorithm="randomized"):
    """Fit a PLS-SVD model: truncated SVD of the Y'X covariance matrix.

    Parameters
    ----------
    X : array, shape (n_samples, n_x_features)
        Predictor data (e.g. brain data), one row per sample.
    Y : array, shape (n_samples, n_y_features)
        Response data, one row per sample.
    n_components : int
        Number of singular vectors / singular values to keep.
    scale : bool, default True
        If True, z-score each column of X and Y (ddof=1) before
        forming the covariance matrix.
    algorithm : str, default "randomized"
        Retained for backward compatibility with the original
        sklearn.TruncatedSVD-based implementation; the exact SVD
        computed here does not use it.

    Returns
    -------
    If ``scale`` is True:
        X_saliences, Y_saliences, singular_values, inertia,
        X_scaled, Y_scaled, sum_var
    otherwise:
        X_saliences, Y_saliences, singular_values, inertia

    ``inertia`` is the sum of the retained singular values;
    ``sum_var`` is the full (untruncated) covariance matrix.
    """
    print("calculating SVD")
    if scale:
        X_scaled = zscore(X, axis=0, ddof=1)
        Y_scaled = zscore(Y, axis=0, ddof=1)
        covariance = np.dot(Y_scaled.T, X_scaled)
    else:
        covariance = np.dot(Y.T, X)

    print(np.shape(covariance))
    # Keep the full covariance matrix so callers can do variance accounting.
    sum_var = covariance

    # NOTE(review): the original called the private TruncatedSVD._fit,
    # which has been removed from scikit-learn.  np.linalg.svd returns
    # singular values in descending order, so slicing the leading
    # n_components yields the same truncated decomposition (up to the
    # usual SVD sign ambiguity) and is deterministic.
    U, all_singular_values, Vt = np.linalg.svd(covariance, full_matrices=False)
    Y_saliences = U[:, :n_components]
    singular_values = all_singular_values[:n_components]
    X_saliences = Vt[:n_components].T
    inertia = singular_values.sum()

    if scale:
        return X_saliences, Y_saliences, singular_values, inertia, X_scaled, Y_scaled, sum_var
    else:
        return X_saliences, Y_saliences, singular_values, inertia
Example #2
0
                                        np.floor(0.8 *
                                                 np.shape(brain_data)[0]),
                                        replace=False)
        training[training_ind] = True
        test = np.invert(training)

        #calculate SVD from training data set
        X_saliences, Y_saliences, singular_values, inertia, X_scaled, Y_scaled, sum_var = fit_pls(
            brain_data[training, :],
            Ob[training, :],
            n_comp,
            scale=True,
            algorithm="randomized")

        #multiply saliencies with test (leftout) dataset (first scale this one)
        brain_test_scaled = zscore(brain_data[test, :], axis=0, ddof=1)
        Ob_test_scaled = zscore(Ob[test, :], axis=0, ddof=1)

        for comp in np.arange(0, n_comp):
            X_saliences_splits[i, comp, :] = X_saliences[:, comp]
            Y_saliences_splits[i, comp, :] = Y_saliences[:, comp]

            lv_brain = np.matrix(brain_test_scaled) * np.matrix(X_saliences)
            lv_ob = np.matrix(Ob_test_scaled) * np.matrix(Y_saliences)

            corr_vals[i, comp, 0] = pearsonr(lv_ob[:, comp], lv_brain[:,
                                                                      comp])[0]

            #permute the smaller matrix of the training data and reproject
            for j in np.arange(1, perm_n + 1):
                Ob_test_scaled_perm = np.random.permutation(Ob_test_scaled)