In [30]:
# Import libraries
%matplotlib inline
from sklearn import datasets
import matplotlib.pyplot as plt
import numpy as np
In [31]:
# import some data to play with
iris = datasets.load_iris()
X = iris.data[:100, :2]  # Only the first two features (sepal length and sepal width).
Y = iris.target[:100]    # First 100 samples: setosa (0) and versicolor (1), a linearly separable pair.
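
A quick sanity check (an added aside, not part of the original run): sklearn stores the iris samples ordered by class, so the 100-row slice should contain exactly the two classes we want to separate.

In [ ]:
# Added sanity check: expect labels [0 1] with 50 samples each
labels, counts = np.unique(Y, return_counts=True)
print(labels, counts)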
In [34]:
# Line parameters, chosen by eye to separate the two classes
m = 1.1    # Slope
c = -2.75  # Intercept
# Create the candidate line over the observed sepal-length range
x1 = np.linspace(np.min(X[:, 0]), np.max(X[:, 0]), 100)
y1 = m * x1 + c
In [35]:
plt.figure(2, figsize=(8, 6))
plt.clf()
# Plot the training points coloured by class, plus the candidate line
plt.scatter(X[:, 0], X[:, 1], c=Y, cmap=plt.cm.Paired)
plt.plot(x1, y1)
plt.xlabel('Sepal length')
plt.ylabel('Sepal width')
Out[35]:
[figure: sepal width vs. sepal length scatter, coloured by class, with the candidate line]
In [36]:
# Create a perceptron that reproduces the hand-picked line
threshold = 1
# Derive the weights from the line equation (derivation below)
w2 = threshold / c
w1 = -w2 * m
# Perceptron calculation: predict class 1 when the weighted sum exceeds the threshold
plant_class = ((X[:, 0] * w1 + X[:, 1] * w2) > threshold).astype(int)
# Compare against the true labels
correctly_classified = np.all(plant_class == Y)
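
The weight mapping follows from rearranging the perceptron's decision boundary $w_1 x_1 + w_2 x_2 = \theta$ into slope-intercept form:

$$x_2 = -\frac{w_1}{w_2}\,x_1 + \frac{\theta}{w_2}$$

Matching this against the line $x_2 = m\,x_1 + c$ gives $c = \theta / w_2$, hence $w_2 = \theta / c$, and $m = -w_1 / w_2$, hence $w_1 = -m\,w_2$, which is exactly what the cell above computes.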
In [37]:
correctly_classified
Out[37]:
In [38]:
from random import choice
from numpy import array, dot, random

def unit_step(x, theta):
    '''Activation function: step from 0 to 1 at the threshold theta.'''
    return 1 if x >= theta else 0
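
In symbols, this activation is the unit step

$$f(x) = \begin{cases} 0 & x < \theta \\ 1 & x \ge \theta \end{cases}$$

so the perceptron fires only when its weighted input reaches the threshold.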
training_data = [
    array([0, 0, 1]),  # third component is a constant bias input
    array([0, 1, 1]),
    array([1, 0, 1]),
    array([1, 1, 1]),
]
target = [0, 1, 1, 1]  # OR truth table
w = random.rand(3)  # Initialize weights randomly
errors = []
eta = 0.2     # Learning rate
n = 100       # Number of iterations
theta = 0.25  # Threshold

for i in range(n):
    x, expected = choice(list(zip(training_data, target)))
    result = dot(w, x)
    error = expected - unit_step(result, theta)
    errors.append(error)
    w += eta * error * x

for x, _ in zip(training_data, target):
    result = dot(x, w)
    print("{}: {} -> {}".format(x[:2], result, unit_step(result, theta)))
In [25]:
plt.ylim([-1, 1])
plt.plot(errors)
Out[25]:
[figure: per-iteration error (-1, 0 or 1) over the 100 training steps]
In [26]:
# Plot the learned decision boundary over the unit square
k = np.linspace(0, 1, 100)
plt.plot(k, -(w[0] / w[1]) * k + theta / w[1] - w[2] / w[1])
plt.xlim((0, 1))
plt.ylim((0, 1))
Out[26]:
[figure: learned decision boundary for the OR inputs in the unit square]
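
With the bias input folded in, the boundary comes from $w_0 x + w_1 y + w_2 = \theta$, i.e.

$$y = -\frac{w_0}{w_1}\,x + \frac{\theta - w_2}{w_1}$$

which is the expression plotted above ($\theta / w_1 - w_2 / w_1$ is the intercept).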
In [39]:
import itertools

# Reuse the same perceptron on the iris data, appending a constant bias input to each sample
training_data = [np.append(x, [1]) for x in X]
target = Y
w = random.rand(3)  # Initialize weights randomly
errors = []
eta = 0.2     # Learning rate
n = 100000    # Number of iterations
theta = 0.25  # Threshold
data = itertools.cycle(zip(training_data, target))

for i in range(n):
    x, expected = next(data)
    result = dot(w, x)
    error = expected - unit_step(result, theta)
    errors.append(error)
    w += eta * error * x

for x, _ in zip(training_data, target):
    result = dot(x, w)
    print("{}: {} -> {}".format(x[:2], result, unit_step(result, theta)))
In [40]:
# Plot the data with the learned decision boundary
plt.scatter(X[:, 0], X[:, 1], c=Y, cmap=plt.cm.Paired)
plt.plot(X[:, 0], -(w[0] / w[1]) * X[:, 0] + theta / w[1] - w[2] / w[1])
plt.xlabel('Sepal length')
plt.ylabel('Sepal width')
Out[40]:
[figure: iris scatter with the learned decision boundary]
In [41]:
plt.ylim([-1, 1])
plt.plot(errors)
Out[41]:
[figure: per-iteration error over the 100,000 training steps]
In [42]:
# Perceptron calculation with the learned weights (w[2] multiplies the constant bias input)
plant_class = ((X[:, 0] * w[0] + X[:, 1] * w[1] + w[2]) > theta).astype(int)
# Compare against the true labels
correctly_classified = np.all(plant_class == Y)
correctly_classified
Out[42]:
In [43]:
-(w[0] / w[1])  # Slope of the learned boundary (compare with the hand-picked m = 1.1)
Out[43]:
In [44]:
theta / w[1] - w[2] / w[1]  # Intercept of the learned boundary (compare with c = -2.75)
Out[44]:
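
To close the loop, a small added cell (hypothetical, not part of the original run) prints the learned line next to the hand-picked one for a direct comparison:

In [ ]:
# Added comparison: learned boundary vs. the hand-picked line y = 1.1x - 2.75
m_learned = -(w[0] / w[1])
c_learned = theta / w[1] - w[2] / w[1]
print("learned:     y = {:.2f}x + {:.2f}".format(m_learned, c_learned))
print("hand-picked: y = 1.10x + -2.75")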