import numpy as np
import matplotlib.pyplot as plt
from matplotlib.colors import ListedColormap
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
from sklearn.datasets import make_moons, make_circles, make_classification
%matplotlib inline
%config InlineBackend.figure_format = 'retina'
Representation of Ensemble Models

We compare the decision surface learned by a single depth-limited decision tree with that of a random forest of 200 such trees, on two noisy synthetic datasets (moons and circles). The test accuracy is printed in the corner of each panel.
from sklearn.ensemble import RandomForestClassifier
from sklearn.tree import DecisionTreeClassifier

h = .02  # step size in the mesh

X, y = make_classification(n_features=2, n_redundant=0, n_informative=2,
                           random_state=1, n_clusters_per_class=1)
rng = np.random.RandomState(2)
X += 2 * rng.uniform(size=X.shape)  # jitter the points so the classes overlap slightly
linearly_separable = (X, y)  # defined for reference; not added to `datasets` below

datasets = [make_moons(noise=0.3, random_state=0),
            make_circles(noise=0.2, factor=0.5, random_state=1),
            ]
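Each generator returns an (X, y) pair; as a quick optional check (my addition, not part of the original figure code), we can inspect the shapes and class balance before plotting. Both generators produce 100 evenly balanced samples by default.

# Optional sanity check: each entry of `datasets` is an (X, y) pair.
for X_ds, y_ds in datasets:
    print(X_ds.shape, y_ds.shape, np.bincount(y_ds))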
try:
    from latexify import latexify
    latexify()
except Exception:
    # fall back to matplotlib defaults if latexify is unavailable
    pass
def plot_comparison(maximum_depth):
    names = ["Decision Tree (Depth %d)" % maximum_depth, "Random Forest"]
    classifiers = [
        DecisionTreeClassifier(max_depth=maximum_depth),
        RandomForestClassifier(max_depth=maximum_depth, n_estimators=200, max_features=1),
    ]
    figure = plt.figure(figsize=(8, 4))
    i = 1
    # iterate over datasets
    for ds_cnt, ds in enumerate(datasets):
        # preprocess dataset, split into training and test part
        X, y = ds
        X = StandardScaler().fit_transform(X)
        X_train, X_test, y_train, y_test = \
            train_test_split(X, y, test_size=.4, random_state=42)

        x_min, x_max = X[:, 0].min() - .5, X[:, 0].max() + .5
        y_min, y_max = X[:, 1].min() - .5, X[:, 1].max() + .5
        xx, yy = np.meshgrid(np.arange(x_min, x_max, h),
                             np.arange(y_min, y_max, h))

        # just plot the dataset first
        cm = plt.cm.RdBu
        cm_bright = ListedColormap(['#FF0000', '#0000FF'])
        ax = plt.subplot(len(datasets), len(classifiers) + 1, i)
        if ds_cnt == 0:
            ax.set_title("Input data")
        # Plot the training points
        ax.scatter(X_train[:, 0], X_train[:, 1], c=y_train, cmap=cm_bright,
                   edgecolors='k')
        # Plot the testing points
        ax.scatter(X_test[:, 0], X_test[:, 1], c=y_test, cmap=cm_bright, alpha=0.6,
                   edgecolors='k')
        ax.set_xlim(xx.min(), xx.max())
        ax.set_ylim(yy.min(), yy.max())
        ax.set_xticks(())
        ax.set_yticks(())
        i += 1

        # iterate over classifiers
        for name, clf in zip(names, classifiers):
            ax = plt.subplot(len(datasets), len(classifiers) + 1, i)
            clf.fit(X_train, y_train)
            score = clf.score(X_test, y_test)

            # Plot the decision boundary. For that, we will assign a color to each
            # point in the mesh [x_min, x_max]x[y_min, y_max].
            if hasattr(clf, "decision_function"):
                Z = clf.decision_function(np.c_[xx.ravel(), yy.ravel()])
            else:
                Z = clf.predict_proba(np.c_[xx.ravel(), yy.ravel()])[:, 1]

            # Put the result into a color plot
            Z = Z.reshape(xx.shape)
            ax.contourf(xx, yy, Z, cmap=cm, alpha=.8)

            # Plot the training points
            ax.scatter(X_train[:, 0], X_train[:, 1], c=y_train, cmap=cm_bright,
                       edgecolors='k')
            # Plot the testing points
            ax.scatter(X_test[:, 0], X_test[:, 1], c=y_test, cmap=cm_bright,
                       edgecolors='k', alpha=0.6)

            ax.set_xlim(xx.min(), xx.max())
            ax.set_ylim(yy.min(), yy.max())
            ax.set_xticks(())
            ax.set_yticks(())
            if ds_cnt == 0:
                ax.set_title(name)
            # print the test accuracy in the bottom-right corner of each panel
            ax.text(xx.max() - .3, yy.min() + .3, ('%.2f' % score).lstrip('0'),
                    size=15, horizontalalignment='right')
            i += 1

    plt.savefig(f"../figures/ensemble/{maximum_depth}-representation.pdf",
                transparent=True, bbox_inches="tight")

plot_comparison(2)
plot_comparison(1)
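The scores printed in each panel can also be tabulated directly. Below is a minimal sketch (my addition, not part of the original figure script) that reuses the moons dataset and the same split settings to compare test accuracy of a depth-limited tree against the 200-tree forest across several depths; the random_state=0 on the forest is my addition, for reproducibility.

# Sketch: numeric companion to the figures above.
X_m, y_m = make_moons(noise=0.3, random_state=0)
X_m = StandardScaler().fit_transform(X_m)
Xtr, Xte, ytr, yte = train_test_split(X_m, y_m, test_size=.4, random_state=42)
for depth in (1, 2, 3, 5):
    tree = DecisionTreeClassifier(max_depth=depth).fit(Xtr, ytr)
    forest = RandomForestClassifier(max_depth=depth, n_estimators=200,
                                    max_features=1, random_state=0).fit(Xtr, ytr)
    print(f"depth={depth}: tree={tree.score(Xte, yte):.2f}, "
          f"forest={forest.score(Xte, yte):.2f}")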