methods = ['standard', 'ltsa', 'hessian', 'modified']

fig = pylab.figure(figsize=(8, 12))

try:
    # compatibility matplotlib < 1.0
    ax = fig.add_axes((0.25, 0.66, 0.4, 0.3), projection='3d')
    ax.scatter(X[:, 0], X[:, 1], X[:, 2], c=color, cmap=pylab.cm.Spectral)
    ax.view_init(4, -72)
except:
    ax = fig.add_axes((0.25, 0.66, 0.5, 0.3))
    ax.scatter(X[:, 0], X[:, 2], c=color, cmap=pylab.cm.Spectral)

ax.set_title('Original Data')

for i, method in enumerate(methods):
    t0 = time()
    Y, err = manifold.locally_linear_embedding(
        X, n_neighbors, out_dim, eigen_solver='arpack', method=method)
    t1 = time()
    print "%s: %.2g sec" % (methods[i], t1 - t0)
    print ' err = %.2e' % err

    ax = fig.add_subplot(323 + i)
    ax.scatter(Y[:, 0], Y[:, 1], c=color, cmap=pylab.cm.Spectral)
    ax.set_title("method = %s" % methods[i])
    ax.xaxis.set_major_formatter(NullFormatter())
    ax.yaxis.set_major_formatter(NullFormatter())

pylab.show()
# ---- Example #2 ----
print "Computing PCA projection"
X_pca = decomposition.RandomizedPCA(n_components=2).fit_transform(X)

# ----------------------------------------------------------------------
# Projection on to the first 2 linear discriminant components

print "Computing LDA projection"
X2 = X.copy()
X2.flat[:: X.shape[1] + 1] += 0.01  # Make X invertible
X_lda = lda.LDA(n_components=2).fit_transform(X2, y)


# ----------------------------------------------------------------------
# Locally linear embedding of the digits dataset
print "Computing LLE embedding"
X_lle, err = manifold.locally_linear_embedding(X, 30, 2, reg=1e-2)
print "Done. Reconstruction error: %g" % err

# ----------------------------------------------------------------------
# Scale and visualize the embedding vectors


def plot_embedding(X, title=None):
    x_min, x_max = np.min(X, 0), np.max(X, 0)
    X = (X - x_min) / (x_max - x_min)

    pl.figure()
    ax = pl.subplot(111)
    for i in range(digits.data.shape[0]):
        pl.text(
            X[i, 0],
# ---- Example #3 ----
print "Computing PCA projection"
X_pca = decomposition.RandomizedPCA(n_components=2).fit_transform(X)

#----------------------------------------------------------------------
# Projection on to the first 2 linear discriminant components

print "Computing LDA projection"
X2 = X.copy()
X2.flat[::X.shape[1] + 1] += 0.01  # Make X invertible
X_lda = lda.LDA(n_components=2).fit_transform(X2, y)

#----------------------------------------------------------------------
# Locally linear embedding of the digits dataset
print "Computing LLE embedding"
X_lle, err = manifold.locally_linear_embedding(X, 30, 2, reg=1e-2)
print "Done. Reconstruction error: %g" % err

#----------------------------------------------------------------------
# Scale and visualize the embedding vectors


def plot_embedding(X, title=None):
    x_min, x_max = np.min(X, 0), np.max(X, 0)
    X = (X - x_min) / (x_max - x_min)

    pl.figure()
    ax = pl.subplot(111)
    for i in range(digits.data.shape[0]):
        pl.text(X[i, 0],
                X[i, 1],
import pylab as pl
from matplotlib import offsetbox
from scikits.learn.utils.fixes import qr_economic
from scikits.learn import manifold, datasets, decomposition, lda

<<<<<<< REMOTE
X = digits.data
=======
method = 'modified'
>>>>>>> LOCAL
<<<<<<< REMOTE
n_samples, n_features = X.shape
=======
print "Computing LLE embedding (method = %s)" % method
>>>>>>> LOCAL
X_r, err = manifold.locally_linear_embedding(digits.data, 30, 2, reg=1e-2,
                                             method=method)
# Random 2D projection using a random unitary matrix
print "Computing random projection"
Q, _ = qr_economic(rng.normal(size=(n_features, 2)))
X_projected = np.dot(Q.T, X.T).T
# Projection on to the first 2 principal components
print "Computing PCA projection"
X_pca = decomposition.RandomizedPCA(n_components=2).fit_transform(X)
#----------------------------------------------------------------------
# Projection on to the first 2 linear discriminant components

print "Computing LDA projection"
X2 = X.copy()
X2.flat[::X.shape[1] + 1] += 0.01 # Make X invertible
X_lda = lda.LDA(n_components=2).fit_transform(X2, y)
# ---- Example #5 ----
print "Computing PCA projection"
X_pca = decomposition.RandomizedPCA(n_components=2).fit_transform(X)

#----------------------------------------------------------------------
# Projection on to the first 2 linear discriminant components

print "Computing LDA projection"
X2 = X.copy()
X2.flat[::X.shape[1] + 1] += 0.01  # Make X invertible
X_lda = lda.LDA(n_components=2).fit_transform(X2, y)


#----------------------------------------------------------------------
# Locally linear embedding of the digits dataset
print "Computing LLE embedding"
X_lle, err = manifold.locally_linear_embedding(X, n_neighbors, 2)
print "Done. Reconstruction error: %g" % err


#----------------------------------------------------------------------
# Modified Locally linear embedding of the digits dataset
print "Computing modified LLE embedding"
X_mlle, err = manifold.locally_linear_embedding(X, n_neighbors, 2,
                                                method='modified')
print "Done. Reconstruction error: %g" % err


#----------------------------------------------------------------------
# Isomap projection of the digits dataset
print "Computing Isomap embedding"
X_iso = manifold.Isomap(n_neighbors, 2).fit_transform(X)
# ---- Example #6 ----
print __doc__

import pylab as pl

# This import is needed to modify the way figure behaves
from mpl_toolkits.mplot3d import Axes3D

# ----------------------------------------------------------------------
# Locally linear embedding of the swiss roll

from scikits.learn import manifold, datasets

X, color = datasets.samples_generator.make_swiss_roll(n_samples=1500)

print "Computing LLE embedding"
X_r, err = manifold.locally_linear_embedding(X, n_neighbors=12, out_dim=2)
print "Done. Reconstruction error: %g" % err

# ----------------------------------------------------------------------
# Plot result

fig = pl.figure()
try:
    # compatibility matplotlib < 1.0
    ax = fig.add_subplot(211, projection="3d")
    ax.scatter(X[:, 0], X[:, 1], X[:, 2], c=color, cmap=pl.cm.Spectral)
except:
    ax = fig.add_subplot(211)
    ax.scatter(X[:, 0], X[:, 2], c=color, cmap=pl.cm.Spectral)

ax.set_title("Original data")
# ---- Example #7 ----
# License: BSD, (C) INRIA 2011

print __doc__

import pylab as pl
# This import is needed to modify the way figure behaves
from mpl_toolkits.mplot3d import Axes3D

#----------------------------------------------------------------------
# Locally linear embedding of the swiss roll

from scikits.learn import manifold, datasets
X, color = datasets.samples_generator.swiss_roll(n_samples=1500)

print "Computing LLE embedding"
X_r, err = manifold.locally_linear_embedding(X, n_neighbors=12, out_dim=2)
print "Done. Reconstruction error: %g" % err

#----------------------------------------------------------------------
# Plot result

fig = pl.figure()
try:
    # compatibility matplotlib < 1.0
    ax = fig.add_subplot(211, projection='3d')
    ax.scatter(X[:, 0], X[:, 1], X[:, 2], c=color, cmap=pl.cm.Spectral)
except:
    ax = fig.add_subplot(211)
    ax.scatter(X[:, 0], X[:, 2], c=color, cmap=pl.cm.Spectral)

ax.set_title("Original data")