Logistic Regression


In [1]:
# Record author, date, Python/IPython versions, and the matplotlib version
# so the notebook's outputs can be tied to a specific environment.
%load_ext watermark
%watermark -a 'Sebastian Raschka' -d -v -p matplotlib


Sebastian Raschka 02/04/2015 

CPython 3.4.3
IPython 3.0.0

matplotlib 1.4.3

In [2]:
import sys
# NOTE(review): hardcoded absolute local path — this only works on the
# author's machine. Prefer an editable install (`pip install -e .`) so the
# notebook is portable. Prepending makes the dev checkout shadow any
# pip-installed copy of mlxtend.
sys.path = ['/Users/sebastian/github/mlxtend'] + sys.path

import mlxtend
from mlxtend.data import iris_data
from mlxtend.classifier import LogisticRegression
from mlxtend.evaluate import plot_decision_regions
# Bare last expression: display the library version for provenance.
mlxtend.__version__


Out[2]:
'0.2.3'

Loading Iris Data


In [3]:
# Restrict the Iris data to two features and the first 100 samples so the
# decision boundary can be drawn in 2D for a binary problem.
X, y = iris_data()
X = X[:, [0, 3]]  # sepal length and petal width
X, y = X[:100], y[:100]  # keep only class 0 and class 1

# Standardize each feature column to zero mean and unit variance
# (population std, numpy's default ddof=0).
for col in (0, 1):
    X[:, col] = (X[:, col] - X[:, col].mean()) / X[:, col].std()



Gradient Descent


In [4]:
import matplotlib.pyplot as plt
%matplotlib inline
from mlxtend.evaluate import plot_decision_regions

lr = LogisticRegression(eta=0.01, epochs=100, learning='gd')
lr.fit(X, y)

plot_decision_regions(X, y, clf=lr)
plt.title('Logistic Regression - Gradient Descent')
plt.show()

print(lr.w_)

plt.plot(range(len(lr.cost_)), lr.cost_)
plt.xlabel('Iterations')
plt.ylabel('Cost')
plt.show()


[ 0.51854085  1.20034182  4.3983604 ]



Stochastic Gradient Descent


In [5]:
# (duplicate imports and `%matplotlib inline` removed — matplotlib and
# plot_decision_regions are already in scope from the cells above.)

# Same model configuration, but trained with learning='sgd'
# (stochastic gradient descent) for comparison with the cell above.
lr = LogisticRegression(eta=0.01, epochs=100, learning='sgd')
lr.fit(X, y)

plot_decision_regions(X, y, clf=lr)
plt.title('Logistic Regression - Stochastic Gradient Descent')
plt.show()

# Learned weight vector — 3 values for 2 features, presumably
# [bias, w_sepal_length, w_petal_width]; confirm against mlxtend docs.
print(lr.w_)

# Cost per epoch for the SGD run.
plt.plot(range(len(lr.cost_)), lr.cost_)
plt.xlabel('Iterations')
plt.ylabel('Cost')
plt.show()


[ 0.51379961  1.17885202  4.38019563]




In [ ]: