Regularised Logistic Regression.
In [1]:
import numpy as np #Importing required modules and libraries
from math import *
from numpy import linalg
import matplotlib.pyplot as plt
In [2]:
# Load the training set (columns: feature1, feature2, 0/1 label) and set up
# the design matrix, learning rate, and a random parameter start.
# Fixed: Python 2 `print` statements replaced with the print() function.
train_data = np.genfromtxt('ex2data2.txt', delimiter=',')
print("Target Values:-")
y = train_data[:, -1]                      # 0/1 class labels (last column)
print(y)
X = train_data[:, 0:2]                     # the two raw feature columns
m = len(y)                                 # number of training examples
X = np.insert(X, 0, np.ones(m), axis=1)    # prepend bias column -> shape (m, 3)
alpha = 0.1                                # gradient-descent learning rate
theta = np.random.random(3)                # random initial parameters in [0, 1)
In [3]:
def calcost(hypothesis, y, theta, lam=4.0):
    """Return the regularised logistic-regression cost J(theta).

    Parameters
    ----------
    hypothesis : ndarray of predicted probabilities in (0, 1), length m.
    y : ndarray of 0/1 target labels, length m.
    theta : parameter vector; theta[0] (bias) is excluded from the penalty.
    lam : regularisation strength. The default 4.0 reproduces the original
        hard-coded penalty, (2/m) * sum(theta[1:]**2) == (lam/(2m)) * sum(...).

    Notes
    -----
    The original relied on a global ``m`` and shadowed the builtin ``sum``;
    m is now derived from ``y`` (same value for this script's call sites).
    """
    m = len(y)
    # Cross-entropy log-likelihood term.
    log_likelihood = np.dot(y, np.log(hypothesis)) + np.dot(1 - y, np.log(1 - hypothesis))
    # Ridge penalty on the non-bias parameters.
    penalty = (lam / 2.0) * np.sum(theta[1:] ** 2)
    return (-log_likelihood + penalty) / m
In [4]:
def sigmoid(z):
    """Element-wise logistic function, 1 / (1 + e^(-z)).

    Accepts a scalar or ndarray and maps every value into (0, 1).
    """
    denominator = 1 + np.exp(-z)
    return 1.0 / denominator
In [5]:
# Evaluate the model once with the random starting theta: predictions,
# initial cost, and the residuals consumed by the first gradient step.
prod = X.dot(theta)  # theta is 1-D, so the original .transpose() was a no-op
hypothesis = sigmoid(prod)
oldcost = calcost(hypothesis, y, theta)
diff = hypothesis - y
In [6]:
# Batch gradient descent with L2 regularisation (bias term unregularised).
# Fixed bugs: the original `np.sum(np.dot(diff, X))` collapsed the gradient
# to a single scalar, applying the same update to every parameter; theta[0]
# was regularised on one line and then overwritten on the next. The shrink
# factor now uses lam = 4.0, consistent with the penalty inside calcost
# ((lam / (2m)) * sum(theta[1:]**2)). Python 2 prints converted to print().
lam = 4.0
for i in range(300):
    grad = np.dot(diff, X)  # gradient vector, one component per parameter
    theta[1:] = theta[1:] * (1 - alpha * lam / m) - (alpha / m) * grad[1:]
    theta[0] -= (alpha / m) * grad[0]  # bias: plain, unregularised update
    prod = np.dot(X, theta)
    hypothesis = sigmoid(prod)  # reuse the helper instead of inlining it
    diff = hypothesis - y
    newcost = calcost(hypothesis, y, theta)
    print(newcost)  # cost should trend downward as the loop converges
In [7]:
# Report the fitted parameters and the model's probabilities on the
# training set. Fixed: Python 2 `print` statements replaced with print().
print("Values of theta:- ", theta)
prod = np.dot(X, theta)  # theta is 1-D, so a transpose would be a no-op
predicted = sigmoid(prod)  # P(y = 1 | x) for every training example
print("The predicted values are as follows:- ")
print(predicted)