In [ ]:
import matplotlib.pyplot as plt
%matplotlib inline
import numpy as np
# draw N random points in the [0,1]x[0,1] square
N = 100
x1 = np.random.rand(N)
x2 = np.random.rand(N)
X = np.vstack(zip(np.ones(N),x1, x2))
print X.shape
# use cosine to define positive and negative classes
y = np.array([1 if np.cos(2*np.pi*X[i,1]) / 2 + 0.5 > X[i,2] else 0 for i in range(N)])
p = plt.figure()
p1 = p.add_subplot(121)
p1.plot(x1,x2,'rx')
# create a cosine curve and add to the plot
x = np.arange(0, 1.0, 0.01)
fx = np.cos(2*np.pi*x) / 2 + 0.5
p1.plot(x, fx, lw=2)
# discriminate those above and below the curve
p2 = p.add_subplot(122)
for i in range(N):
if y[i]==1:
p2.plot(x1[i],x2[i],'bo') # o (bolinhas) azuis (blue)
else:
p2.plot(x1[i],x2[i],'ro') # o (bolinhas) vermelhas (red)
p2.plot(x, fx, lw=2)
plt.show()
Reference — scikit-learn logistic regression documentation:
http://scikit-learn.org/stable/modules/linear_model.html#logistic-regression
In [ ]:
from sklearn.linear_model import LogisticRegression

# Fit a (linear) logistic-regression classifier on the bias-augmented data X.
clf = LogisticRegression(max_iter=100)  # default max_iter=100
clf.fit(X, y)
z = clf.predict(X)
print(z)  # print() call: Python 3 compatible

# Plot the predictions: class 1 as blue crosses, class 0 as red dots.
# Boolean masks replace the per-point loop: two plot calls instead of N.
pos = z == 1
plt.plot(X[pos, 1], X[pos, 2], 'bx')
plt.plot(X[~pos, 1], X[~pos, 2], 'ro')
plt.plot(x, fx, lw=2)  # true decision boundary (cosine curve) for comparison
plt.xlabel('x1')
plt.ylabel('x2')
plt.show()
In [ ]:
from sklearn import svm

# Fit an RBF-kernel SVM; gamma/C chosen to follow the curved boundary closely.
clf = svm.SVC(gamma=0.3, C=100.)
clf.fit(X, y)
z = clf.predict(X)
print(z)  # print() call: Python 3 compatible

# Plot the predictions: class 1 as blue crosses, class 0 as red dots.
# Boolean masks replace the per-point loop: two plot calls instead of N.
pos = z == 1
plt.plot(X[pos, 1], X[pos, 2], 'bx')
plt.plot(X[~pos, 1], X[~pos, 2], 'ro')
plt.plot(x, fx, lw=2)  # true decision boundary (cosine curve) for comparison
plt.xlabel('x1')
plt.ylabel('x2')
plt.show()
In [ ]:
from sklearn.neural_network import MLPClassifier

# Single hidden layer of 10 units, trained by SGD.
# NOTE: (10) is just the int 10; the tuple syntax (10,) states the
# one-layer architecture explicitly, as the sklearn API expects.
clf = MLPClassifier(solver='sgd', learning_rate_init=0.01, max_iter=800,
                    hidden_layer_sizes=(10,), random_state=1)
clf.fit(X, y)
z = clf.predict(X)
print(z)  # print() call: Python 3 compatible

# Plot the predictions: class 1 as blue crosses, class 0 as red dots.
# Boolean masks replace the per-point loop: two plot calls instead of N.
pos = z == 1
plt.plot(X[pos, 1], X[pos, 2], 'bx')
plt.plot(X[~pos, 1], X[~pos, 2], 'ro')
plt.plot(x, fx, lw=2)  # true decision boundary (cosine curve) for comparison
plt.xlabel('x1')
plt.ylabel('x2')
plt.show()