# generate training data
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.colors import ListedColormap
from sklearn import neighbors, datasets
N = 50
x1 = np.zeros((2,N))
x2 = np.zeros((2,N))
X = np.zeros((2,2*N))
# class +1: N points scattered around the line y = 3 + 2x
x1[0] = np.random.uniform(-3,0,N)
x1[1] = 3.+2.*x1[0] + np.random.uniform(-2.5,1.5,N)
# class -1: N points scattered around the line y = -2 + 0.125x
x2[0] = np.random.uniform(-4,4,N)
x2[1] = -2.+0.125*x2[0] + np.random.uniform(-1.5,1.5,N)
# stack both clouds into a (2N, 2) design matrix and a +/-1 label vector
X[0] = np.r_[x1[0],x2[0]]
X[1] = np.r_[x1[1],x2[1]]
X = X.T
y = np.r_[np.ones(N),-np.ones(N)]
n_neighbors = 1
h = .02 # step size in the mesh
# Create color maps
cmap_light = ListedColormap(['#FFAAAA', '#AAFFAA', '#AAAAFF'])
cmap_bold = ListedColormap(['#FF0000', '#00FF00', '#0000FF'])
for weights in ['uniform', 'distance']:
    # we create an instance of the Neighbours Classifier and fit the data.
    clf = neighbors.KNeighborsClassifier(n_neighbors, weights=weights)
    clf.fit(X, y)
    # Plot the decision boundary. For that, we will assign a color to each
    # point in the mesh [x_min, x_max]x[y_min, y_max].
    x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
    y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
    xx, yy = np.meshgrid(np.arange(x_min, x_max, h),
                         np.arange(y_min, y_max, h))
    Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])
    # Put the result into a color plot
    Z = Z.reshape(xx.shape)
    plt.figure()
    plt.pcolormesh(xx, yy, Z, cmap=cmap_light)
    # Plot also the training points
    plt.scatter(X[:, 0], X[:, 1], c=y, cmap=cmap_bold,
                edgecolor='k', s=20)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())
    plt.title("2-Class classification (k = %i, weights = '%s')"
              % (n_neighbors, weights))
    plt.show()
plt.scatter(X[:, 0], X[:, 1], c=y, cmap=cmap_bold, edgecolor='k', s=20)
plt.show()
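# Optional sketch (not in the original script): how the number of neighbours affects
# training accuracy. KNeighborsClassifier.score returns mean accuracy on the given data;
# with k = 1 the training accuracy is essentially 1, so larger k are added for comparison.
# The variable names below (k_try, knn) are new and only used in this sketch.
for k_try in [1, 3, 7, 15]:
    knn = neighbors.KNeighborsClassifier(n_neighbors=k_try)
    knn.fit(X, y)
    print("k = %2d, training accuracy = %.3f" % (k_try, knn.score(X, y)))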
# we create an instance of a linear regressor (least squares on the +/-1 labels) and fit the data.
from sklearn.linear_model import LinearRegression
linreg = LinearRegression()  # 'normalize=True' is no longer a LinearRegression option; scale features separately if needed
linreg.fit(X, y)
# Plot the decision boundary. For that, we will assign a color to each
# point in the mesh [x_min, x_max]x[y_min, y_max].
x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
xx, yy = np.meshgrid(np.arange(x_min, x_max, h),
                     np.arange(y_min, y_max, h))
Z = np.sign(linreg.predict(np.c_[xx.ravel(), yy.ravel()]))  # threshold the real-valued prediction at 0
# Put the result into a color plot
Z = Z.reshape(xx.shape)
plt.figure()
plt.pcolormesh(xx, yy, Z, cmap=cmap_light)
# Plot also the training points
plt.scatter(X[:, 0], X[:, 1], c=y, cmap=cmap_bold,
            edgecolor='k', s=20)
plt.xlim(xx.min(), xx.max())
plt.ylim(yy.min(), yy.max())
plt.title("2-Class classification (k = %i)"
% (n_neighbors))
plt.show()
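# Optional sketch (not in the original script): logistic regression on the same data, for
# comparison with the least-squares classifier above. LogisticRegression minimises the
# logistic loss plotted below (with a small L2 penalty by default) instead of squared error.
# The names logreg and Zlog are new and only used in this sketch.
from sklearn.linear_model import LogisticRegression
logreg = LogisticRegression()
logreg.fit(X, y)
Zlog = logreg.predict(np.c_[xx.ravel(), yy.ravel()]).reshape(xx.shape)
plt.figure()
plt.pcolormesh(xx, yy, Zlog, cmap=cmap_light)
plt.scatter(X[:, 0], X[:, 1], c=y, cmap=cmap_bold, edgecolor='k', s=20)
plt.title("2-Class classification (logistic regression)")
plt.show()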
t=np.linspace(-3,3,1000)
y = 0.5*(np.sign(t)+1)
plt.plot(t,y,'b-')  # unit step: 0 for t<0, 1 for t>0
plt.grid(True)
plt.axis([-3., 3., 0., 3.])
plt.show()
t=np.linspace(-3,3,1000)
y = 0.5*(np.sign(t)+1)
plt.plot(-t,y,'b-')  # mirrored step: the 0-1 loss as a function of the margin t
plt.grid(True)
plt.axis([-3., 3., 0., 3.])
plt.show()
t=np.linspace(-3,3,1000)
y = 0.5*(np.sign(t)+1)
plt.plot(t,y,'k-')                     # unit step
plt.plot(t,1./(1.+np.exp(-t)),'b-')    # logistic sigmoid 1/(1+e^(-t))
plt.grid(True)
plt.axis([-3., 3., 0., 3.])
plt.show()
t=np.linspace(-3,3,1000)
y = 0.5*(np.sign(t)+1)
plt.plot(-t,y,'k-')                    # 0-1 loss
plt.plot(t,1./(1.+np.exp(t)),'b-')     # sigmoid of -t: 1/(1+e^t)
plt.grid(True)
plt.axis([-3., 3., 0., 3.])
plt.show()
t=np.linspace(-3,3,1000)
y = 0.5*(np.sign(t)+1)
plt.plot(t,y,'k-')                     # unit step
plt.plot(t,np.log(1.+np.exp(t)),'b-')  # softplus log(1+e^t)
plt.grid(True)
plt.axis([-3., 3., 0., 3.])
plt.show()
t=np.linspace(-3,3,1000)
y = 0.5*(np.sign(t)+1)
plt.plot(-t,y,'k-')                     # 0-1 loss
plt.plot(t,np.log(1.+np.exp(-t)),'b-')  # logistic loss log(1+e^(-t))
plt.grid(True)
plt.axis([-3., 3., 0., 3.])
plt.show()
t=np.linspace(-3,3,1000)
y = 0.5*(np.sign(t)+1)
plt.plot(t,y,'k-')                    # unit step
plt.plot(t,np.maximum(0.,1.+t),'b-')  # positive part (1+t)_+ (np.positive is just unary +, not the positive part)
plt.grid(True)
plt.axis([-3., 3., 0., 3.])
plt.show()
t=np.linspace(-3,3,1000)
y = 0.5*(np.sign(t)+1)
plt.plot(-t,y,'k-')                   # 0-1 loss
plt.plot(t,np.maximum(0.,1.-t),'b-')  # hinge loss (1-t)_+
plt.grid(True)
plt.axis([-3., 3., 0., 3.])
plt.show()
t=np.linspace(-3,3,1000)
y = 0.5*(np.sign(t)+1)
plt.plot(t,y,'k-')          # unit step
plt.plot(t,(1.+t)**2,'b-')  # quadratic (1+t)^2
plt.grid(True)
plt.axis([-3., 3., 0., 3.])
plt.show()
t=np.linspace(-3,3,1000)
y = 0.5*(np.sign(t)+1)
plt.plot(-t,y,'k-')         # 0-1 loss
plt.plot(t,(1.-t)**2,'b-')  # squared loss (1-t)^2
plt.grid(True)
plt.axis([-3., 3., 0., 3.])
plt.show()
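# Optional sketch (not in the original script): the surrogate losses above overlaid in a
# single figure, all as functions of the margin t (black: 0-1 loss; colours: surrogates).
t = np.linspace(-3, 3, 1000)
plt.plot(-t, 0.5*(np.sign(t)+1), 'k-', label='0-1 loss')
plt.plot(t, np.log(1.+np.exp(-t)), 'b-', label='logistic')
plt.plot(t, np.maximum(0., 1.-t), 'g-', label='hinge')
plt.plot(t, (1.-t)**2, 'r-', label='squared')
plt.legend()
plt.grid(True)
plt.axis([-3., 3., 0., 3.])
plt.show()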
# generate training data
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.colors import ListedColormap
from sklearn import neighbors, datasets
from sklearn import svm
N = 50
x1 = np.zeros((2,N))
x2 = np.zeros((2,N))
X = np.zeros((2,2*N))
# class +1: noisy points around y = 2 + x (the noise is large, so the classes generally overlap)
x1[0] = np.random.uniform(-2,1,N)
x1[1] = 2.+1.*x1[0] + np.random.uniform(-4,5,N)
# class -1: noisy points around y = -3.2 + x
x2[0] = np.random.uniform(-4,4,N)
x2[1] = -3.2+1.*x2[0] + np.random.uniform(-5,4,N)
# stack both clouds into a (2N, 2) design matrix and a +/-1 label vector
X[0] = np.r_[x1[0],x2[0]]
X[1] = np.r_[x1[1],x2[1]]
X = X.T
y = np.r_[np.ones(N),-np.ones(N)]
# Create color maps (two classes)
cmap_light = ListedColormap(['#FFAAAA', '#AAAAFF'])
cmap_bold = ListedColormap(['#FF0000', '#0000FF'])
plt.scatter(X[:, 0], X[:, 1], c=y, cmap=cmap_bold, edgecolor='k', s=20)
plt.show()
h = .02 # step size in the mesh
M = 30
Cfp = np.zeros(M)  # training false-positive counts for each class-weight setting
Cfn = np.zeros(M)  # training false-negative counts for each class-weight setting
for i in np.arange(M):
    k = (1/float(32))*2**(10*i/float(M-1))  # weight on class +1, swept geometrically from 1/32 to 32
    # we create an instance of a linear SVM with asymmetric class weights and fit the data.
    clf = svm.SVC(gamma='scale',kernel='linear',class_weight={-1:1,1:k})
    clf.fit(X, y)
    # Plot the decision boundary. For that, we will assign a color to each
    # point in the mesh [x_min, x_max]x[y_min, y_max].
    x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
    y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
    xx, yy = np.meshgrid(np.arange(x_min, x_max, h),
                         np.arange(y_min, y_max, h))
    xy = np.vstack([xx.ravel(), yy.ravel()]).T
    #Z = clf.decision_function(xy).reshape(xx.shape)
    Z = clf.predict(np.c_[xx.ravel(), yy.ravel()])
    ypred = clf.predict(X)
    # count training false positives (y=-1 predicted +1) and false negatives (y=+1 predicted -1)
    Cfp[i] = np.sum(np.absolute(y-ypred)*(1-y)*0.25)
    Cfn[i] = np.sum(np.absolute(y-ypred)*(y+1)*0.25)
    # Put the result into a color plot
    Z = Z.reshape(xx.shape)
    plt.figure()
    plt.pcolormesh(xx, yy, Z, cmap=cmap_light)
    # Plot also the training points
    plt.scatter(X[:, 0], X[:, 1], c=y, cmap=cmap_bold,
                edgecolor='k', s=20)
    plt.xlim(xx.min(), xx.max())
    plt.ylim(yy.min(), yy.max())
    # plot decision boundary and margins
    ax = plt.gca()
    # plot support vectors
    #ax.contour(xx, yy, Z, colors='k', levels=[-1, 0, 1], alpha=0.5,
    #           linestyles=['--', '-', '--'])
    #ax.scatter(clf.support_vectors_[:, 0], clf.support_vectors_[:, 1], s=100,
    #           linewidth=1, facecolors='none', edgecolors='k')
    plt.show()
# operating curve from the weight sweep: false-positive rate (y) vs. false-negative rate (x)
plt.plot(np.append(1/float(N)*Cfn,1),np.append(Cfp/float(N),0),'ob-')
plt.show()
# same curve with both trivial end points appended: (1,0) always predict -1, (0,1) always predict +1
xxx=np.append(np.zeros(1)+1,1/float(N)*Cfn)
xxx=np.append(xxx,np.zeros(1)+0)
yyy=np.append(np.zeros(1)+0,1/float(N)*Cfp)
yyy=np.append(yyy,np.zeros(1)+1)
plt.plot(xxx,yyy,'ob-')
plt.show()
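# Optional sketch (not in the original script): a similar trade-off curve from a single SVM
# using sklearn.metrics.roc_curve, thresholding the decision function instead of retraining
# with different class weights. Note roc_curve returns true-positive vs. false-positive rate,
# whereas the plots above show false-positive rate (y) against false-negative rate (x).
# The names clf0, fpr, tpr, thresholds are new and only used in this sketch.
from sklearn.metrics import roc_curve
clf0 = svm.SVC(gamma='scale', kernel='linear')
clf0.fit(X, y)
fpr, tpr, thresholds = roc_curve(y, clf0.decision_function(X))
plt.plot(fpr, tpr, 'ob-')
plt.xlabel('false positive rate')
plt.ylabel('true positive rate')
plt.grid(True)
plt.show()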