In [1]:
# Record author, date, Python version, and the matplotlib version used,
# so the notebook's outputs can be reproduced later.
%load_ext watermark
%watermark -a 'Sebastian Raschka' -d -v -p matplotlib
In [2]:
import sys
# NOTE(review): hardcoded absolute local path — this only works on the
# author's machine. Prefer an installed mlxtend or a configurable path.
# Prepending (rather than appending) makes this checkout shadow any
# installed mlxtend version.
sys.path = ['/Users/sebastian/github/mlxtend'] + sys.path
import mlxtend
from mlxtend.data import iris_data
from mlxtend.evaluate import plot_decision_regions
from mlxtend.classifier import Adaline
# Bare last expression: displays the mlxtend version as the cell output.
mlxtend.__version__
Out[2]:
In [3]:
import numpy as np
X, y = iris_data()
X = X[:, [0, 3]] # sepal length and petal width
X = X[0:100] # class 0 and class 1
y = np.where(y[0:100] == 0, -1, 1) # class -1 and class 1
# standardize
X_std = np.copy(X)
X_std[:,0] = (X[:,0] - X[:,0].mean()) / X[:,0].std()
X_std[:,1] = (X[:,1] - X[:,1].mean()) / X[:,1].std()
In [4]:
%matplotlib inline
import matplotlib.pyplot as plt
## Standardized
# Batch gradient descent on the standardized features. With standardized
# inputs a comparatively large learning rate (eta=0.01) converges in few
# epochs.
ada = Adaline(epochs=30, eta=0.01, learning='gd', random_seed=1)
ada.fit(X_std, y)
# Learned weight vector (bias + one weight per feature).
print(ada.w_)
plot_decision_regions(X_std, y, clf=ada)
plt.title('Adaline - Gradient Descent')
plt.show()
# Cost per epoch: should decrease monotonically if eta is well chosen.
plt.plot(range(len(ada.cost_)), ada.cost_, marker='o')
plt.xlabel('Iterations')
plt.ylabel('Cost')
plt.show()
## Non Standardized
# Raw features need a much smaller learning rate (eta=0.0001) and far
# more epochs (500) to converge — the motivation for standardizing.
ada = Adaline(epochs=500, eta=0.0001, learning='gd', random_seed=1)
ada.fit(X, y)
print(ada.w_)
plot_decision_regions(X, y, clf=ada)
plt.title('Adaline - Gradient Descent')
plt.show()
# No marker here: 500 points would clutter the curve.
plt.plot(range(len(ada.cost_)), ada.cost_, )
plt.xlabel('Iterations')
plt.ylabel('Cost')
plt.show()
In [5]:
%matplotlib inline
import matplotlib.pyplot as plt
## Standardized
# Same comparison as the previous cell, but with stochastic gradient
# descent (learning='sgd') instead of batch updates.
ada = Adaline(epochs=30, eta=0.01, learning='sgd', random_seed=1)
ada.fit(X_std, y)
# Learned weight vector (bias + one weight per feature).
print(ada.w_)
plot_decision_regions(X_std, y, clf=ada)
plt.title('Adaline - Stochastic Gradient Descent')
plt.show()
# Average cost per epoch; SGD curves are noisier than batch GD.
plt.plot(range(len(ada.cost_)), ada.cost_, marker='o')
plt.xlabel('Iterations')
plt.ylabel('Cost')
plt.show()
## Non Standardized
# Raw features again require a much smaller eta and many more epochs.
ada = Adaline(epochs=500, eta=0.0001, learning='sgd', random_seed=1)
ada.fit(X, y)
print(ada.w_)
plot_decision_regions(X, y, clf=ada)
plt.title('Adaline - Stochastic Gradient Descent')
plt.show()
plt.plot(range(len(ada.cost_)), ada.cost_, marker='o')
plt.xlabel('Iterations')
plt.ylabel('Cost')
plt.show()
In [6]:
# Second, harder problem: the two classes here are not linearly
# separable in these two features.
X, y = iris_data()
X = X[:, [0, 1]] # sepal length and sepal width
X = X[50:150] # class 1 and class 2 (rows 50-149)
y = np.where(y[50:150] == 2, -1, 1) # class -1 and class 1
# standardize
X_std = np.copy(X)
X_std[:,0] = (X[:,0] - X[:,0].mean()) / X[:,0].std()
X_std[:,1] = (X[:,1] - X[:,1].mean()) / X[:,1].std()
In [11]:
%matplotlib inline
import matplotlib.pyplot as plt
## Standardized
ppn = Adaline(epochs=30, eta=0.01, learning='gd', random_seed=1)
ppn.fit(X_std, y)
print(ppn.w_)
plot_decision_regions(X_std, y, clf=ppn)
plt.title('Adaline - Standardized, y {-1, 1}')
plt.show()
plt.plot(range(len(ppn.cost_)), ppn.cost_,)
plt.xlabel('Iterations')
plt.ylabel('Cost')
plt.show()
## Non Standardized
ada = Adaline(epochs=500, eta=0.0001, learning='gd', random_seed=1)
ada.fit(X, y)
print(ada.w_)
plot_decision_regions(X, y, clf=ada)
plt.title('Adaline - Non-Standardized, y {-1, 1}')
plt.show()
plt.plot(range(len(ada.cost_)), ada.cost_,)
plt.xlabel('Iterations')
plt.ylabel('Cost')
plt.show()
In [ ]: