import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from matplotlib import cm
%matplotlib inline
%config InlineBackend.figure_format = 'retina'
# SVM Kernel
# Toy 1-D dataset: the positive class sits between the negatives, so it is
# NOT linearly separable in the original 1-D input space.
X = np.array([-2, -1, 0, 1, 2, 3, 4]).reshape(-1, 1)
y = np.array([-1, -1, 1, 1, 1, -1, -1])

# Plot the samples on a line (zero y-coordinate), colored by class.
plt.scatter(X.flatten(), np.zeros_like(X), c=y, cmap=cm.Paired)
def phi(x):
    """Feature map lifting column vector(s) x to the (x, x^2) plane.

    In the lifted 2-D space the toy dataset becomes linearly separable,
    which is the core idea behind kernel SVMs.
    """
    return np.hstack((x, np.square(x)))
# Inspect the lifted features for each sample.
phi(X)
# Expected output:
# array([[-2,  4],
#        [-1,  1],
#        [ 0,  0],
#        [ 1,  1],
#        [ 2,  4],
#        [ 3,  9],
#        [ 4, 16]])
0], phi(X)[:, 1], c = y,cmap = cm.Paired) plt.scatter(phi(X)[:,
from sklearn.svm import SVC

# Linear SVM on the lifted features; very large C approximates a hard margin
# (almost no slack allowed).
c = SVC(kernel='linear', C=1e6)
c.fit(phi(X), y)
# Output: SVC(C=1000000.0, kernel='linear')
# Plot the samples in the lifted space, drawn on top of the decision surface.
plt.scatter(phi(X)[:, 0], phi(X)[:, 1], c=y, zorder=10, cmap=cm.Paired,
            edgecolors='k', alpha=1, s=200)

# Bounding box of the lifted data, padded by 1 on each side.
x_min = phi(X)[:, 0].min() - 1
x_max = phi(X)[:, 0].max() + 1
y_min = phi(X)[:, 1].min() - 1
y_max = phi(X)[:, 1].max() + 1

# Dense 200x200 grid over the box; evaluate the SVM decision function on it.
XX, YY = np.mgrid[x_min:x_max:200j, y_min:y_max:200j]
Z = c.decision_function(np.c_[XX.ravel(), YY.ravel()])

# Put the result into a color plot
Z = Z.reshape(XX.shape)
plt.pcolormesh(XX, YY, Z > 0, cmap=plt.cm.Paired, alpha=0.6)
# Solid line: decision boundary (Z = 0); dashed lines: margins (Z = +/-1).
plt.contour(XX, YY, Z, colors=['k', 'k', 'k'],
            linestyles=['--', '-', '--'], levels=[-1, 0, 1])
plt.xlabel(r"$x_1 = X$")
plt.ylabel(r"$x_2 = X^2$")
plt.title(r"Decision surface: {:0.1f}*x_1 + {:0.1f}*x_2 + {:0.1f} = 0".format(
    c.coef_[0, 0], c.coef_[0, 1], c.intercept_[0]))
# Output: Text(0.5, 1.0, 'Decision surface: 1.3*x_1 + -0.7*x_2 + 1.0 = 0')
# Now using non-linearly separable data in 2D
from sklearn.datasets import make_blobs, make_circles

# Two concentric circles with substantial noise, so the classes overlap.
X, y = make_circles(n_samples=100, factor=0.5, noise=0.4)
plt.scatter(X[:, 0], X[:, 1], c=y, cmap=cm.Paired)
def plot_contour(X, y, clf):
    """Plot clf's decision boundary, margins, and support vectors over (X, y).

    Parameters
    ----------
    X : ndarray of shape (n_samples, 2), the 2-D input data.
    y : ndarray of shape (n_samples,), class labels used for coloring.
    clf : a fitted classifier exposing decision_function, predict,
        and support_vectors_ (e.g. sklearn.svm.SVC).
    """
    # 40x40 evaluation grid spanning the data's bounding box.
    xx, yy = np.meshgrid(np.linspace(X[:, 0].min(), X[:, 0].max(), 40),
                         np.linspace(X[:, 1].min(), X[:, 1].max(), 40))
    Z = clf.decision_function(np.c_[xx.ravel(), yy.ravel()])
    Z = Z.reshape(xx.shape)
    # Solid line: boundary (Z = 0); dashed lines: margins (Z = +/-1).
    plt.contour(xx, yy, Z, colors='k', levels=[-1, 0, 1], alpha=0.9,
                linestyles=['--', '-', '--'])
    # BUGFIX: use the clf parameter, not the global `c`, so the support
    # vectors shown always belong to the model being plotted.
    plt.scatter(clf.support_vectors_[:, 0], clf.support_vectors_[:, 1], s=100,
                linewidth=1, facecolors='none', edgecolors='k')
    plt.scatter(X[:, 0], X[:, 1], c=y, cmap=cm.Paired)
    plt.pcolormesh(xx, yy, Z > 0, cmap=plt.cm.Paired, alpha=0.2)
    # Train accuracy
    y_pred = clf.predict(X)
    plt.title("Train accuracy: {:0.2f}\n Number of support vectors: {}".format(
        (y_pred == y).mean(), len(clf.support_vectors_)))
# First linear SVM: cannot separate concentric circles, so expect a poor fit.
c = SVC(kernel='linear', C=10)
c.fit(X, y)
plot_contour(X, y, c)
# Degree-2 polynomial kernel: equivalent to the explicit (x, x^2) lift,
# so it can capture the circular boundary.
c_poly2 = SVC(kernel='poly', degree=2, C=10)
c_poly2.fit(X, y)
plot_contour(X, y, c_poly2)
# RBF kernel
c_rbf = SVC(kernel='rbf', C=10, gamma=1e-1)
c_rbf.fit(X, y)
plot_contour(X, y, c_rbf)
# Interactive widget to show the effect of gamma
from ipywidgets import interact


def plot_svm(gamma=0.05):
    """Fit an RBF SVM with the given gamma on (X, y) and plot its contours.

    Small gamma -> smooth, nearly linear boundary; large gamma -> the model
    wraps tightly around individual points (overfitting).
    """
    c = SVC(kernel='rbf', C=10, gamma=gamma)
    c.fit(X, y)
    plot_contour(X, y, c)


interact(plot_svm, gamma=(0.01, 60, 0.05))
# Output: <function __main__.plot_svm(gamma=0.05)>