Example #1
 def test_target_explained_variance(self):
     np.random.seed(0)
     clf = PCA()
     clf.fit(self.X)
     ax = skplt.plot_pca_component_variance(clf, target_explained_variance=0)
     ax = skplt.plot_pca_component_variance(clf, target_explained_variance=0.5)
     ax = skplt.plot_pca_component_variance(clf, target_explained_variance=1)
     ax = skplt.plot_pca_component_variance(clf, target_explained_variance=1.5)
Example #2
 def test_ax(self):
     np.random.seed(0)
     clf = PCA()
     clf.fit(self.X)
     fig, ax = plt.subplots(1, 1)
     out_ax = skplt.plot_pca_component_variance(clf)
     assert ax is not out_ax
     out_ax = skplt.plot_pca_component_variance(clf, ax=ax)
     assert ax is out_ax
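Both tests reference self.X without defining it, so they are meant to run inside a test class with a fixture. A minimal sketch of such a fixture is shown below; the choice of the digits dataset and the tearDown cleanup are assumptions, not scikit-plot's actual test harness.

import unittest
import numpy as np
import matplotlib.pyplot as plt
from sklearn.decomposition import PCA
from sklearn.datasets import load_digits as load_data
import scikitplot.plotters as skplt

class TestPlotPCAComponentVariance(unittest.TestCase):
    def setUp(self):
        # Assumed fixture: seed the RNG and load data so self.X exists for the tests.
        np.random.seed(0)
        self.X, self.y = load_data(return_X_y=True)

    def tearDown(self):
        # Close figures between tests so plots do not accumulate.
        plt.close("all")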
rf = rf.fit(X, y)
skplt.plot_feature_importances(rf, feature_names=['petal length', 'petal width',
                                                  'sepal length', 'sepal width'])
plt.show()
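The feature-importances snippet above assumes rf, X, y, skplt, and plt already exist. One possible setup is sketched below; the iris data and the untuned RandomForestClassifier are assumptions, not stated in the original.

import matplotlib.pyplot as plt
from sklearn.datasets import load_iris
from sklearn.ensemble import RandomForestClassifier
import scikitplot.plotters as skplt

# Assumed context for the snippet above: iris features and a default random forest.
X, y = load_iris(return_X_y=True)
rf = RandomForestClassifier(random_state=0)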
# Run PCA with 50 variables and see how much variance the dimensions explain

from sklearn.decomposition import PCA
from sklearn.datasets import load_digits as load_data
import scikitplot.plotters as skplt
import matplotlib.pyplot as plt

X, y = load_data(return_X_y=True)
pca = PCA()
pca.fit(X)

skplt.plot_pca_component_variance(pca)
plt.show()

# Plot the PCA spectrum
pca.fit(X)

plt.figure(1, figsize=(4, 3))
plt.clf()
plt.axes([.2, .2, .7, .7])
plt.plot(pca.explained_variance_, linewidth=2)
plt.axis('tight')
plt.xlabel('n_components')
plt.ylabel('explained_variance_')
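
For comparison, the cumulative curve that plot_pca_component_variance draws can be reproduced by hand from pca.explained_variance_ratio_. The sketch below reuses the pca object fitted above; it is a minimal hand-rolled version, not scikit-plot's exact implementation.

import numpy as np
import matplotlib.pyplot as plt

# Cumulative share of variance explained by the first k components.
cum_ratio = np.cumsum(pca.explained_variance_ratio_)
plt.figure(figsize=(4, 3))
plt.plot(range(1, len(cum_ratio) + 1), cum_ratio, linewidth=2)
plt.xlabel('Number of components')
plt.ylabel('Cumulative explained variance ratio')
plt.show()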

"""An example showing the plot_pca_component_variance method used by a scikit-learn PCA object"""
from sklearn.decomposition import PCA
from sklearn.datasets import load_digits as load_data
import scikitplot.plotters as skplt
import matplotlib.pyplot as plt

X, y = load_data(return_X_y=True)
pca = PCA(random_state=1)
pca.fit(X)
skplt.plot_pca_component_variance(pca)
plt.show()
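As the tests above exercise, plot_pca_component_variance also accepts a target_explained_variance argument. A minimal usage sketch reusing the fitted pca object follows; the 0.75 threshold is an arbitrary example value, not taken from the original.

# Mark how many components are needed to reach a chosen share of explained variance.
skplt.plot_pca_component_variance(pca, target_explained_variance=0.75)
plt.show()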