import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from matplotlib import cm
%matplotlib inline
SVM Soft Margin
from sklearn.datasets import make_blobs

X, y = make_blobs(centers=2, n_samples=100, random_state=0, cluster_std=1.5)
plt.scatter(X[:, 0], X[:, 1], c=y, cmap=cm.autumn)
plt.gca().set_aspect('equal')
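The soft-margin SVM trades margin width against training errors through the regularization parameter C. In the standard primal formulation with slack variables $\xi_i$,

$$\min_{w,\,b,\,\xi}\;\; \tfrac{1}{2}\lVert w\rVert^2 + C\sum_{i=1}^{n}\xi_i \quad\text{subject to}\quad y_i\,(w^\top x_i + b) \ge 1 - \xi_i,\;\; \xi_i \ge 0,$$

a small C tolerates many margin violations (wide margin, many support vectors), while a large C penalizes violations heavily (narrow margin, fewer support vectors). The loop below fits svm.SVC with a linear kernel for values of C spanning several orders of magnitude and plots the decision boundary, the margins, and the support vectors.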
from sklearn import svm

X_train = X
y_train = y
kernel = 'linear'

for fig_num, C in enumerate([0.0001, 0.001, 0.1, 1, 100, 10000]):
    # Fit a linear SVM for this value of the regularization parameter C
    clf = svm.SVC(kernel=kernel, C=C)
    clf.fit(X, y)

    plt.figure(fig_num)
    plt.clf()
    plt.scatter(X[:, 0], X[:, 1], c=y, zorder=10, cmap=plt.cm.autumn,
                edgecolor='k', s=80)
    plt.axis('tight')

    x_min = X[:, 0].min() - 1
    x_max = X[:, 0].max() + 1
    y_min = X[:, 1].min() - 1
    y_max = X[:, 1].max() + 1

    # Evaluate the decision function on a dense grid covering the data
    XX, YY = np.mgrid[x_min:x_max:200j, y_min:y_max:200j]
    Z = clf.decision_function(np.c_[XX.ravel(), YY.ravel()])

    # Put the result into a color plot
    Z = Z.reshape(XX.shape)
    # plt.pcolormesh(XX, YY, Z > 0, cmap=plt.cm.autumn, alpha=0.2)

    # Draw the decision boundary (Z = 0) and the margins (Z = -1, +1)
    plt.contour(XX, YY, Z, colors=['k', 'k', 'k'],
                linestyles=['--', '-', '--'], levels=[-1, 0, 1])

    # Mark the support vectors
    plt.scatter(clf.support_vectors_[:, 0],
                clf.support_vectors_[:, 1],
                s=5, lw=0.001, facecolors='none', zorder=10, c='k', edgecolors='none')

    plt.title("Linear Kernel with C = {}\nNumber of support vectors = {}".format(
        C, len(clf.support_vectors_)))