from time import time
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import offsetbox
from sklearn import (manifold, datasets, decomposition, ensemble,
                     discriminant_analysis, random_projection, neighbors)

digits = datasets.load_digits(n_class=6)
X, y = digits.data, digits.target
n_samples, n_features = X.shape
n_neighbors = 30
def plot_embedding(X, title=None):
    # Rescale the embedding to the unit square so different methods can be
    # compared on the same axes.
    x_min, x_max = np.min(X, 0), np.max(X, 0)
    X = (X - x_min) / (x_max - x_min)

    plt.figure()
    ax = plt.subplot(111)
    for i in range(X.shape[0]):
        plt.text(X[i, 0], X[i, 1], str(y[i]),
                 color=plt.cm.Set1(y[i] / 10.),
                 fontdict={'weight': 'bold', 'size': 9})

    if hasattr(offsetbox, 'AnnotationBbox'):
        # only plot thumbnails with matplotlib >= 1.0
        shown_images = np.array([[1., 1.]])  # just something big
        for i in range(X.shape[0]):
            dist = np.sum((X[i] - shown_images) ** 2, 1)
            if np.min(dist) < 4e-3:
                # don't show points that are too close
                continue
            shown_images = np.r_[shown_images, [X[i]]]
            imagebox = offsetbox.AnnotationBbox(
                offsetbox.OffsetImage(digits.images[i], cmap=plt.cm.gray_r),
                X[i])
            ax.add_artist(imagebox)
    plt.xticks([]), plt.yticks([])
    if title is not None:
        plt.title(title)
# Plot images of the digits: tile a 20x20 grid of samples into one image,
# with each 8x8 digit placed inside a 10x10 cell.
n_img_per_row = 20
img = np.zeros((10 * n_img_per_row, 10 * n_img_per_row))
for i in range(n_img_per_row):
    ix = 10 * i + 1
    for j in range(n_img_per_row):
        iy = 10 * j + 1
        img[ix:ix + 8, iy:iy + 8] = X[i * n_img_per_row + j].reshape((8, 8))

plt.imshow(img, cmap=plt.cm.binary)
plt.xticks([]); plt.yticks([]); plt.title('From the 64-D digits dataset')
t0 = time()
rp = random_projection.SparseRandomProjection(n_components=2, random_state=42)
X_projected = rp.fit_transform(X)
plot_embedding(X_projected, "Sparse Random Projection (time %.3fs)" % (time()-t0))
t0 = time()
X_pca = decomposition.TruncatedSVD(n_components=2).fit_transform(X)
plot_embedding(X_pca,
               "Principal Components (Truncated SVD) (time %.3fs)" %
               (time() - t0))
X2 = X.copy()
X2.flat[::X.shape[1] + 1] += 0.01  # small diagonal offset so LDA's within-class covariance is non-singular
t0 = time()
X_lda = discriminant_analysis.LinearDiscriminantAnalysis(n_components=2).fit_transform(X2, y)
plot_embedding(X_lda, "LDA (time %.3fs)" % (time() - t0))
For Isomap, path_method selects between Dijkstra's algorithm and Floyd-Warshall for the shortest-path computation; if it is not specified, a method is chosen automatically based on the data. eigen_solver controls the eigendecomposition routine and is likewise chosen automatically when unspecified; using ARPACK can reduce the computational cost. The run below keeps these automatic defaults; an explicit configuration is sketched right after it.
t0 = time()
X_iso = manifold.Isomap(n_neighbors=n_neighbors, n_components=2).fit_transform(X)
plot_embedding(X_iso, "Isomap (time %.3fs)" % (time() - t0))
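As a minimal sketch of the parameters described above (not part of the original run, which keeps the automatic defaults), the same Isomap fit with path_method and eigen_solver set explicitly: 'D' requests Dijkstra's algorithm and 'arpack' the ARPACK eigensolver, both standard scikit-learn options; the variable names here are illustrative.
# Sketch only: explicit solver choices for Isomap (names are illustrative).
iso_explicit = manifold.Isomap(n_neighbors=n_neighbors, n_components=2,
                               path_method='D', eigen_solver='arpack')
X_iso_explicit = iso_explicit.fit_transform(X)
plot_embedding(X_iso_explicit, "Isomap (Dijkstra + ARPACK)")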
clf = manifold.LocallyLinearEmbedding(n_neighbors=n_neighbors, n_components=2,
                                      method='standard')
t0 = time()
X_lle = clf.fit_transform(X)
plot_embedding(X_lle, "LLE (time %.3fs)" % (time() - t0))
print("Reconstruction error: %g" % clf.reconstruction_error_)
Reconstruction error: 2.11987e-06
method="modified"
controls this feature. It requires n_neighbors
> n_components
.clf = manifold.LocallyLinearEmbedding(
n_neighbors=n_neighbors, n_components=2, method='modified')
t0 = time()
X_mlle = clf.fit_transform(X)
plot_embedding(X_mlle, "LLE (modified) (time %.2fs)" % (time() - t0))
print("Reconstruction error: %g" % clf.reconstruction_error_)
Reconstruction error: 0.360724
method="hessian"
controls this feature. It requires n_neighbors
> n_components*(n_components+3)/2
.clf = manifold.LocallyLinearEmbedding(
n_neighbors=n_neighbors, n_components=2, method='hessian')
t0 = time()
X_hlle = clf.fit_transform(X)
plot_embedding(X_hlle, "LLE (Hessian) (time %.3fs)" % (time() - t0))
print("Reconstruction error: %g" % clf.reconstruction_error_)
Reconstruction error: 0.212673
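To make the Hessian LLE bound above concrete (an illustrative check, not part of the original example): with n_components = 2 the requirement is n_neighbors > 2 * (2 + 3) / 2 = 5, which n_neighbors = 30 easily satisfies.
# Illustrative only: verify the Hessian LLE neighbor requirement.
min_hessian_neighbors = 2 * (2 + 3) // 2  # n_components * (n_components + 3) / 2 with n_components = 2
assert n_neighbors > min_hessian_neighbors  # 30 > 5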
clf = manifold.LocallyLinearEmbedding(
    n_neighbors=n_neighbors, n_components=2, method='ltsa')
t0 = time()
X_ltsa = clf.fit_transform(X)
plot_embedding(X_ltsa, "LTSA (time %.3fs)" % (time() - t0))
print("Reconstruction error: %g" % clf.reconstruction_error_)
Reconstruction error: 0.212677
clf = manifold.MDS(n_components=2, n_init=1, max_iter=100)
t0 = time()
X_mds = clf.fit_transform(X)
plot_embedding(X_mds, "MDS (time %.2fs)" % (time() - t0))
print("Stress: %f" % clf.stress_)
Stress: 140185087.993700
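As a hedged sketch bridging to the metric vs non-metric comparison that follows (not part of the original example; n_init and max_iter simply mirror the metric run above), the same embedding can be computed with metric=False:
# Sketch only: non-metric MDS on the same digits data for comparison.
nmds = manifold.MDS(n_components=2, metric=False, n_init=1, max_iter=100)
t0 = time()
X_nmds = nmds.fit_transform(X)
plot_embedding(X_nmds, "Non-metric MDS (time %.2fs)" % (time() - t0))
print("Stress: %f" % nmds.stress_)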
# metric vs non-metric MDS - reconstructed points on noisy data
# (plots slightly shifted to avoid complete overlap)
import numpy as np
from matplotlib import pyplot as plt
from matplotlib.collections import LineCollection
from sklearn import manifold
from sklearn.metrics import euclidean_distances
from sklearn.decomposition import PCA