def test_pca_on_uncentered_data():
    pca1 = PCA(solver='svd')
    pca1.fit(X)

    pca2 = PCA(solver='eigen')
    pca2.fit(X)
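    # both solvers should yield the same explained-variance ratios,
    # even when X is not mean-centered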
    assert_almost_equal(pca1.e_vals_normalized_, pca2.e_vals_normalized_)
def test_evals():

    pca = PCA(n_components=2, solver='eigen')
    pca.fit(X_std)
    assert_almost_equal(pca.e_vals_, [2.9, 0.9, 0.2, 0.02], decimal=1)

    pca = PCA(n_components=2, solver='svd')
    pca.fit(X_std)
    assert_almost_equal(pca.e_vals_, [2.9, 0.9, 0.2, 0.02], decimal=1)
def test_whitening():
    pca = PCA(n_components=2)
    res = pca.fit(X_std).transform(X_std)
    diagonals_sum = np.sum(np.diagonal(np.cov(res.T)))
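    # without whitening, the two retained components keep their eigenvalue
    # variances (~2.93 + ~0.93), so the covariance trace is ~3.9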
    assert round(diagonals_sum, 1) == 3.9, diagonals_sum

    pca = PCA(n_components=2, whitening=True)
    res = pca.fit(X_std).transform(X_std)
    diagonals_sum = np.sum(np.diagonal(np.cov(res.T)))
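    # with whitening, each retained component is rescaled to unit variance,
    # so the covariance trace equals the number of components (2.0)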
    assert round(diagonals_sum, 1) == 2.0, diagonals_sum
Example No. 7
def test_evals():

    pca = PCA(n_components=2, solver='eigen')
    pca.fit(X_std)

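    # e_vals_ exposes the full eigenvalue spectrum, even with n_components=2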
    expected = [2.93035378, 0.92740362, 0.14834223, 0.02074601]
    assert_almost_equal(pca.e_vals_, expected, decimal=5)

    pca = PCA(n_components=2, solver='svd')
    pca.fit(X_std)
    assert_almost_equal(pca.e_vals_, expected, decimal=5)
def test_loadings():

    expect = np.array([[0.9, -0.4, -0.3, 0.], [-0.5, -0.9, 0.1, -0.],
                       [1., -0., 0.1, -0.1], [1., -0.1, 0.2, 0.1]])

    pca = PCA(solver='eigen')
    pca.fit(X_std)
    assert_almost_equal(pca.loadings_, expect, decimal=1)

    expect = np.array([[-0.9, -0.4, 0.3, 0.], [0.4, -0.9, -0.1, -0.],
                       [-1., -0., -0.1, -0.1], [-1., -0.1, -0.2, 0.1]])

    pca = PCA(solver='svd')
    pca.fit(X_std)
    assert_almost_equal(pca.loadings_, expect, decimal=1)
Example No. 10
from sklearn.ensemble import ExtraTreesClassifier
from mlxtend.feature_extraction import PrincipalComponentAnalysis as PCA
from sklearn.model_selection import cross_val_score
import numpy as np
import matplotlib.pyplot as plt

X = np.genfromtxt('../../contest_data/xtrain_linear_imputed.csv', delimiter=',')
y = np.genfromtxt('../../contest_data/train.csv', delimiter=',')[1:,-1]




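# project the imputed features onto the first 1000 principal components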
pca = PCA(n_components=1000)
X_pca = pca.fit(X).transform(X)
et = ExtraTreesClassifier(n_estimators=1000, max_depth=None, random_state=0, verbose=0)
scores = cross_val_score(et, X_pca, y, scoring='f1_micro', cv=5, verbose=5)
print(scores.mean())


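# baseline: the same classifier on the raw features, for comparison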
et = ExtraTreesClassifier(n_estimators=300, max_depth=None, random_state=0, verbose=1)
scores = cross_val_score(et, X, y, scoring='f1_micro', cv=5, verbose=5)
print(scores.mean())
'''
components=1000, estimators=1000 gives 32.6% f1
'''





pca = PCA(n_components=1000)
Example No. 11
def test_fail_array_transform():
    # transforming a 1D array is expected to raise a ValueError
    pca = PCA(n_components=2)
    pca.fit(X)
    with pytest.raises(ValueError):
        pca.transform(X[1])
def test_default_components():
    pca = PCA()
    res = pca.fit(X_std).transform(X_std)
    assert res.shape[1] == 4
def test_variance_explained_ratio():
    pca = PCA()
    pca.fit(X_std)
    assert np.isclose(np.sum(pca.e_vals_normalized_), 1.)
    assert np.sum(pca.e_vals_normalized_ < 0.) == 0
Example No. 14
def test_fail_array_fit():
    # fitting a 1D array is expected to raise a ValueError
    pca = PCA(n_components=2)
    with pytest.raises(ValueError):
        pca.fit(X[1])
def test_evals():
    pca = PCA(n_components=2, solver='eigen')
    pca.fit(X)
    assert_almost_equal(pca.e_vals_, [2.93, 0.93, 0.15, 0.02], decimal=2)
def test_default_2components():
    pca = PCA(n_components=2)
    res = pca.fit(X).transform(X)
    assert res.shape[1] == 2
def test_eigen_vs_svd():
    pca = PCA(n_components=2, solver='eigen')
    eigen_res = pca.fit(X).transform(X)
    pca = PCA(n_components=2, solver='svd')
    svd_res = pca.fit(X).transform(X)
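    # the two solvers may flip component signs, so compare absolute values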
    assert_allclose(np.absolute(eigen_res), np.absolute(svd_res), atol=0.0001)
Example No. 18
            color='blue', marker='^', alpha=0.5)

plt.xlabel('x coordinate')
plt.ylabel('y coordinate')

#plt.show()
plt.savefig('../figs/tutorial/mlxtendex1_1.png')
plt.close()
# The moons are linearly inseparable, so standard linear PCA will fail to accurately represent the data in 1D space.

# Use PCA for dimensionality reduction

# Specify the number of components for PCA
pca = PCA(n_components=2)
# Transform X in accordance with the 2-component PCA
X_pca = pca.fit(X).transform(X)

# Red half moon
plt.scatter(X_pca[y==0, 0], X_pca[y==0, 1],  # start and peak/trough of each 'moon'
            color='red', marker='o', alpha=0.5)

# Blue half moon
plt.scatter(X_pca[y==1, 0], X_pca[y==1, 1],  # start and peak/trough of each 'moon'
            color='blue', marker='^', alpha=0.5)

plt.xlabel('PC1')
plt.ylabel('PC2')

#plt.show()
plt.savefig('../figs/tutorial/mlxtendex1_2.png')
plt.close()
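
# Since linear PCA cannot unfold the half moons, a kernelized projection is the
# usual remedy. A minimal sketch, assuming mlxtend's RBFKernelPCA API; the gamma
# value and the output path are illustrative, not tuned:
from mlxtend.feature_extraction import RBFKernelPCA

kpca = RBFKernelPCA(gamma=15.0, n_components=2)
kpca.fit(X)
X_kpca = kpca.X_projected_  # fitted projection is stored on the estimator

plt.scatter(X_kpca[y==0, 0], X_kpca[y==0, 1],
            color='red', marker='o', alpha=0.5)
plt.scatter(X_kpca[y==1, 0], X_kpca[y==1, 1],
            color='blue', marker='^', alpha=0.5)
plt.xlabel('PC1')
plt.ylabel('PC2')
plt.savefig('../figs/tutorial/mlxtendex1_kpca.png')  # illustrative path
plt.close()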
Example No. 19
def plot_pca_correlation_graph(X,
                               variables_names,
                               dimensions=(1, 2),
                               figure_axis_size=6,
                               X_pca=None,
                               explained_variance=None):
    """
    Compute the PCA for X and plot the correlation graph

    Parameters
    ----------
    X : 2d array like.
        The columns represent the different variables and the rows are the
        samples of those variables

    variables_names : array like
        Name of the columns (the variables) of X

    dimensions: tuple with two elements.
        dimensions to be plotted (x, y)

    figure_axis_size :
         size of the final frame. The figure created is a square with length
         and width equal to figure_axis_size.

    X_pca : np.ndarray, shape = [n_samples, n_components].
        Optional.
        `X_pca` is the matrix of the transformed components from X.
        If not provided, the function computes PCA automatically using
        mlxtend.feature_extraction.PrincipalComponentAnalysis
        Expected `n_components >= max(dimensions)`

    explained_variance : 1 dimension np.ndarray, length = n_components
        Optional.
        `explained_variance` are the eigenvalues from the diagonalized
        covariance matrix of the PCA transformation.
        If not provided, the function computes PCA independently.
        Expected `n_components == X.shape[1]`

    Returns
    ----------
        matplotlib_figure, correlation_matrix

    Examples
    -----------
    For usage examples, please see
    http://rasbt.github.io/mlxtend/user_guide/plotting/plot_pca_correlation_graph/

    """

    X = np.array(X)
    X = X - X.mean(axis=0)
    n_comp = max(dimensions)

    if (X_pca is None) and (explained_variance is None):
        pca = PrincipalComponentAnalysis(n_components=n_comp)
        pca.fit(X)
        X_pca = pca.transform(X)
        explained_variance = pca.e_vals_

    elif (X_pca is not None) and (explained_variance is None):
        raise ValueError("If `X_pca` is not None, the `explained variance`"
                         " values should not be `None`.")

    elif (X_pca is None) and (explained_variance is not None):
        raise ValueError("If `explained variance` is not None, the `X_pca`"
                         " values should not be `None`.")

    elif (X_pca is not None) and (explained_variance is not None):
        if X_pca.shape[1] != len(explained_variance):
            raise ValueError(f"Number of principal components must "
                             f"match the number "
                             f"of eigenvalues. Got "
                             f"{X_pca.shape[1]} "
                             f"!= "
                             f"{len(explained_variance)}")

    if X_pca.shape[1] < n_comp:
        raise ValueError(f"Input array `X_pca` contains fewer principal"
                         f" components than expected based on `dimensions`."
                         f" Got {X_pca.shape[1]} components in X_pca, expected"
                         f" at least `max(dimensions)={n_comp}`.")
    if len(explained_variance) < n_comp:
        raise ValueError(f"Input array `explained_variance` contains fewer"
                         f" elements than expected. Got"
                         f" {len(explained_variance)} elements, expected"
                         f" at least `max(dimensions)={n_comp}`.")

    corrs = create_correlation_table(
        X_pca, X, ['Dim ' + str(i + 1) for i in range(n_comp)],
        variables_names)
    tot = sum(X.var(0)) * X.shape[0] / (X.shape[0] - 1)
    explained_var_ratio = [(i / tot) * 100 for i in explained_variance]

    # Plotting circle
    fig_res = plt.figure(figsize=(figure_axis_size, figure_axis_size))
    circle1 = plt.Circle((0, 0), radius=1, color='k', fill=False)
    fig = plt.gcf()
    fig.gca().add_artist(circle1)

    # Plotting arrows
    texts = []
    for name, row in corrs.iterrows():
        x = row['Dim ' + str(dimensions[0])]
        y = row['Dim ' + str(dimensions[1])]
        plt.arrow(0.0,
                  0.0,
                  x,
                  y,
                  color='k',
                  length_includes_head=True,
                  head_width=.05)

        plt.plot([0.0, x], [0.0, y], 'k-')
        texts.append(plt.text(x, y, name, fontsize=2 * figure_axis_size))
    # Plotting the horizontal and vertical axis lines
    plt.plot([-1.1, 1.1], [0, 0], 'k--')
    plt.plot([0, 0], [-1.1, 1.1], 'k--')

    # Adjusting text
    adjust_text(texts)
    # Setting limits and title
    plt.xlim((-1.1, 1.1))
    plt.ylim((-1.1, 1.1))
    plt.title("Correlation Circle", fontsize=figure_axis_size * 3)

    plt.xlabel("Dim " + str(dimensions[0]) +
               " (%s%%)" % str(explained_var_ratio[dimensions[0] - 1])[:4],
               fontsize=figure_axis_size * 2)
    plt.ylabel("Dim " + str(dimensions[1]) +
               " (%s%%)" % str(explained_var_ratio[dimensions[1] - 1])[:4],
               fontsize=figure_axis_size * 2)
    return fig_res, corrs
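
# A minimal usage sketch for plot_pca_correlation_graph, assuming a four-column
# feature matrix X is in scope (the feature names below are illustrative):
feature_names = ['sepal length', 'sepal width', 'petal length', 'petal width']
fig, corr_matrix = plot_pca_correlation_graph(X, feature_names, dimensions=(1, 2))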
Example No. 22
    #print scores

np.savetxt('../../contest_data/xtest_tree_imputed.csv', xtrain, delimiter=',')

from sklearn.naive_bayes import MultinomialNB
gnb = MultinomialNB()
scores = cross_val_score(gnb,
                         xtrain,
                         ytrain,
                         scoring='f1_micro',
                         cv=5,
                         verbose=5)
print(scores.mean())

pca = PCA(n_components=300)
xtrain_pca = pca.fit(xtrain[:, 500:]).transform(xtrain[:, 500:])
xtest_pca = pca.transform(xtest[:, 500:])
# imputing test data
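# for each of the first 500 columns, predict the missing test entries from the
# PCA-compressed features with `lin` (a regressor assumed to be defined earlier
# in this script)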
for i in range(500):
    print(i)
    train_missings = np.isnan(xtrain[:, i])
    train_nonmissings = ~train_missings
    test_missings = np.isnan(xtest[:, i])
    test_nonmissings = ~test_missings
    xtest[test_missings,
          i] = lin.fit(xtrain_pca[train_nonmissings, :],
                       xtrain[train_nonmissings,
                              i]).predict(xtest_pca[test_missings, :])

np.savetxt('../../contest_data/xtest_linear_imputed.csv', xtest, delimiter=',')
'''
def test_variance_explained_ratio():
    pca = PCA()
    pca.fit(X_std)
    assert math.isclose(np.sum(pca.e_vals_normalized_), 1.)
    assert math.isclose(np.sum(pca.e_vals_normalized_ < 0.), 0, abs_tol=1e-10)
def plot_pca_correlation_graph(X,
                               variables_names,
                               dimensions=(1, 2),
                               figure_axis_size=6,
                               X_pca=None):
    """
    Compute the PCA for X and plots the Correlation graph

    Parameters
    ----------
    X : 2d array like.
        The columns represent the different variables and the rows are the
         samples of thos variables
    variables_names : array like
        Name of the columns (the variables) of X
    dimensions: tuple with two elements.
        dimensions to be plot (x,y)
    X_pca : optional. if not provided, compute PCA independently
    figure_axis_size :
         size of the final frame. The figure created is a square with length
         and width equal to figure_axis_size.
    Returns
    ----------
        matplotlib_figure , correlation_matrix
    """
    X = np.array(X)
    X = X - X.mean(axis=0)
    n_comp = max(dimensions)

    if X_pca is None:
        pca = PrincipalComponentAnalysis(n_components=n_comp)
        pca.fit(X)
        X_pca = pca.transform(X)

    corrs = create_correlation_table(
        X_pca, X, ['Dim ' + str(i + 1) for i in range(n_comp)],
        variables_names)
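    # note: `pca` below is only bound when X_pca was computed above; calling
    # this version with a precomputed X_pca would raise a NameError here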
    tot = sum(pca.e_vals_)
    explained_var_ratio = [(i / tot) * 100 for i in pca.e_vals_]

    # Plotting circle
    fig_res = plt.figure(figsize=(figure_axis_size, figure_axis_size))
    circle1 = plt.Circle((0, 0), radius=1, color='k', fill=False)
    fig = plt.gcf()
    fig.gca().add_artist(circle1)

    # Plotting arrows
    texts = []
    for name, row in corrs.iterrows():
        x = row['Dim ' + str(dimensions[0])]
        y = row['Dim ' + str(dimensions[1])]
        plt.arrow(0.0,
                  0.0,
                  x,
                  y,
                  color='k',
                  length_includes_head=True,
                  head_width=.05)

        plt.plot([0.0, x], [0.0, y], 'k-')
        texts.append(plt.text(x, y, name, fontsize=2 * figure_axis_size))
    # Plotting the horizontal and vertical axis lines
    plt.plot([-1.1, 1.1], [0, 0], 'k--')
    plt.plot([0, 0], [-1.1, 1.1], 'k--')

    # Adjusting text
    adjust_text(texts)
    # Setting limits and title
    plt.xlim((-1.1, 1.1))
    plt.ylim((-1.1, 1.1))
    plt.title("Correlation Circle", fontsize=figure_axis_size * 3)

    plt.xlabel("Dim " + str(dimensions[0]) + " (%s%%)" %
               str(explained_var_ratio[dimensions[0] - 1])[:4].lstrip("0."),
               fontsize=figure_axis_size * 2)
    plt.ylabel("Dim " + str(dimensions[1]) + " (%s%%)" %
               str(explained_var_ratio[dimensions[1] - 1])[:4].lstrip("0."),
               fontsize=figure_axis_size * 2)
    return fig_res, corrs
Example No. 28
def test_variance_explained_ratio():
    pca = PCA()
    pca.fit(X_std)
    assert_almost_equal(np.sum(pca.e_vals_normalized_), 1.)
    assert np.sum(pca.e_vals_normalized_ < 0.) == 0
Example No. 30
def test_default_components():
    pca = PCA(n_components=0)
    res = pca.fit(X).transform(X)