Submit
Path:
~
/
/
opt
/
alt
/
python35
/
share
/
doc
/
alt-python35-scikit-learn-0.18.1
/
examples
/
File Content:
plot_multioutput_face_completion.py
"""
==============================================
Face completion with a multi-output estimators
==============================================

This example shows the use of multi-output estimator to complete images.
The goal is to predict the lower half of a face given its upper half.

The first column of images shows true faces. The next columns illustrate
how extremely randomized trees, k nearest neighbors, linear
regression and ridge regression complete the lower half of those faces.
"""
print(__doc__)

import numpy as np
import matplotlib.pyplot as plt

from sklearn.datasets import fetch_olivetti_faces
from sklearn.utils.validation import check_random_state

from sklearn.ensemble import ExtraTreesRegressor
from sklearn.neighbors import KNeighborsRegressor
from sklearn.linear_model import LinearRegression
from sklearn.linear_model import RidgeCV

# Load the faces datasets
data = fetch_olivetti_faces()
targets = data.target

# Flatten each 64x64 image into a 4096-element feature vector.
data = data.images.reshape((len(data.images), -1))
train = data[targets < 30]
test = data[targets >= 30]  # Test on independent people

# Test on a subset of people
n_faces = 5
rng = check_random_state(4)
face_ids = rng.randint(test.shape[0], size=(n_faces,))
test = test[face_ids, :]

n_pixels = data.shape[1]
# BUG FIX: np.ceil/np.floor return floats, which are invalid as slice
# indices on NumPy >= 1.12 (TypeError). Cast the split point to int once.
upper_end = int(np.ceil(0.5 * n_pixels))
lower_start = int(np.floor(0.5 * n_pixels))
X_train = train[:, :upper_end]   # Upper half of the faces
y_train = train[:, lower_start:]  # Lower half of the faces
X_test = test[:, :upper_end]
y_test = test[:, lower_start:]

# Fit estimators: each one learns a multi-output mapping from the upper
# half of a face (features) to its lower half (targets).
ESTIMATORS = {
    "Extra trees": ExtraTreesRegressor(n_estimators=10, max_features=32,
                                       random_state=0),
    "K-nn": KNeighborsRegressor(),
    "Linear regression": LinearRegression(),
    "Ridge": RidgeCV(),
}

y_test_predict = dict()
for name, estimator in ESTIMATORS.items():
    estimator.fit(X_train, y_train)
    y_test_predict[name] = estimator.predict(X_test)

# Plot the completed faces: column 1 is the true face, the remaining
# columns show each estimator's reconstruction of the lower half.
image_shape = (64, 64)

n_cols = 1 + len(ESTIMATORS)
plt.figure(figsize=(2. * n_cols, 2.26 * n_faces))
plt.suptitle("Face completion with multi-output estimators", size=16)

for i in range(n_faces):
    true_face = np.hstack((X_test[i], y_test[i]))

    # Only the first row gets column titles.
    if i:
        sub = plt.subplot(n_faces, n_cols, i * n_cols + 1)
    else:
        sub = plt.subplot(n_faces, n_cols, i * n_cols + 1,
                          title="true faces")

    sub.axis("off")
    sub.imshow(true_face.reshape(image_shape),
               cmap=plt.cm.gray,
               interpolation="nearest")

    for j, est in enumerate(sorted(ESTIMATORS)):
        completed_face = np.hstack((X_test[i], y_test_predict[est][i]))

        if i:
            sub = plt.subplot(n_faces, n_cols, i * n_cols + 2 + j)
        else:
            sub = plt.subplot(n_faces, n_cols, i * n_cols + 2 + j,
                              title=est)

        sub.axis("off")
        sub.imshow(completed_face.reshape(image_shape),
                   cmap=plt.cm.gray,
                   interpolation="nearest")

plt.show()
Submit
FILE
FOLDER
Name
Size
Permission
Action
applications
---
0755
bicluster
---
0755
calibration
---
0755
classification
---
0755
cluster
---
0755
covariance
---
0755
cross_decomposition
---
0755
datasets
---
0755
decomposition
---
0755
ensemble
---
0755
exercises
---
0755
feature_selection
---
0755
gaussian_process
---
0755
linear_model
---
0755
manifold
---
0755
mixture
---
0755
model_selection
---
0755
neighbors
---
0755
neural_networks
---
0755
preprocessing
---
0755
semi_supervised
---
0755
svm
---
0755
text
---
0755
tree
---
0755
README.txt
116 bytes
0644
feature_stacker.py
1911 bytes
0644
hetero_feature_union.py
6241 bytes
0644
missing_values.py
3055 bytes
0644
plot_compare_reduction.py
2489 bytes
0644
plot_cv_predict.py
799 bytes
0644
plot_digits_pipe.py
1813 bytes
0644
plot_isotonic_regression.py
1767 bytes
0644
plot_johnson_lindenstrauss_bound.py
7474 bytes
0644
plot_kernel_approximation.py
8004 bytes
0644
plot_kernel_ridge_regression.py
6269 bytes
0644
plot_multilabel.py
4157 bytes
0644
plot_multioutput_face_completion.py
3019 bytes
0644
N4ST4R_ID | Naxtarrr