Regularised Logistic Regression.


In [1]:
import numpy as np  #Importing required modules and libraries

from math import *
from numpy import linalg

import matplotlib.pyplot as plt

In [2]:
train_data = np.genfromtxt('ex2data2.txt',delimiter=',')
print "Target Values:-"
y = train_data[:,-1]
print y
X= train_data[:,0:2]
m = len(y)
X= np.insert(X,0,np.ones(m),axis = 1)
alpha = 0.1
theta = np.random.random(3)


Target Values:-
[ 1.  1.  1.  1.  1.  1.  1.  1.  1.  1.  1.  1.  1.  1.  1.  1.  1.  1.
  1.  1.  1.  1.  1.  1.  1.  1.  1.  1.  1.  1.  1.  1.  1.  1.  1.  1.
  1.  1.  1.  1.  1.  1.  1.  1.  1.  1.  1.  1.  1.  1.  1.  1.  1.  1.
  1.  1.  1.  1.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.
  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.
  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.
  0.  0.  0.  0.  0.  0.  0.  0.  0.  0.]

In [3]:
def calcost(hypothesis,y,theta):
    loghyp = np.log(hypothesis)
    sum  = (np.dot(y,loghyp)) + np.dot((1-y),np.log(1-hypothesis)) - 2*np.sum(theta[1:]**2)  
    return -sum/m

In [4]:
def sigmoid(z):
    return 1/(1 + np.exp(-z))

In [5]:
prod = np.dot(X,theta.transpose())
hypothesis = sigmoid(prod)
oldcost = calcost(hypothesis,y,theta)
diff = hypothesis - y

In [6]:
for i in range(300):
    theta = theta*(1-(alpha*2)/m) - (alpha/m)*(np.sum(np.dot(diff,X)))
    theta[0] = theta[0] - (alpha/m)*(np.sum(np.dot(diff,X)))
    prod = np.dot(X,theta.transpose())
    hypothesis = 1/(1 + np.exp(-prod))
    diff = hypothesis - y
    newcost = calcost(hypothesis,y,theta)
    print newcost


0.78965013093
0.779921637419
0.771518740213
0.76428429299
0.758073708903
0.752755550512
0.74821158429
0.744336424352
0.741036881749
0.73823112056
0.735847704003
0.73382459554
0.73210816311
0.730652220251
0.729417126047
0.728368956718
0.727478754805
0.726721857005
0.726077298408
0.725527288718
0.725056754874
0.724652943855
0.724305079356
0.724004066195
0.723742236622
0.723513133225
0.723311323557
0.723132242194
0.722972056373
0.722827551874
0.722696036217
0.722575256629
0.722463330617
0.722358687236
0.722260017474
0.722166232342
0.722076427519
0.721989853531
0.721905890628
0.721824027616
0.721743844048
0.721664995245
0.721587199695
0.721510228486
0.721433896419
0.721358054565
0.721282584026
0.721207390712
0.721132400968
0.721057557927
0.720982818465
0.72090815066
0.720833531683
0.720758946042
0.720684384129
0.720609841014
0.720535315456
0.720460809081
0.720386325714
0.720311870831
0.720237451109
0.720163074067
0.720088747774
0.720014480615
0.719940281107
0.719866157751
0.719792118923
0.719718172789
0.719644327234
0.719570589824
0.71949696777
0.719423467909
0.719350096694
0.719276860187
0.719203764065
0.719130813625
0.719058013795
0.718985369143
0.718912883896
0.718840561951
0.718768406895
0.718696422017
0.718624610328
0.718552974577
0.718481517264
0.718410240662
0.718339146823
0.7182682376
0.718197514658
0.718126979487
0.718056633415
0.717986477617
0.717916513131
0.717846740864
0.717777161603
0.717707776026
0.717638584708
0.717569588127
0.717500786678
0.717432180673
0.717363770351
0.717295555882
0.717227537375
0.71715971488
0.717092088394
0.717024657866
0.716957423199
0.716890384258
0.716823540866
0.716756892815
0.716690439865
0.716624181745
0.716558118159
0.716492248787
0.716426573287
0.716361091295
0.716295802432
0.716230706298
0.716165802481
0.716101090554
0.716036570076
0.715972240598
0.715908101657
0.715844152782
0.715780393494
0.715716823305
0.715653441722
0.715590248245
0.715527242367
0.715464423577
0.715401791362
0.7153393452
0.715277084571
0.715215008949
0.715153117804
0.715091410608
0.715029886827
0.714968545928
0.714907387375
0.714846410632
0.71478561516
0.714725000424
0.714664565882
0.714604310998
0.714544235232
0.714484338044
0.714424618897
0.714365077251
0.714305712568
0.714246524312
0.714187511944
0.714128674929
0.71407001273
0.714011524814
0.713953210646
0.713895069693
0.713837101424
0.713779305307
0.713721680814
0.713664227415
0.713606944583
0.713549831792
0.713492888518
0.713436114237
0.713379508426
0.713323070565
0.713266800135
0.713210696616
0.713154759494
0.713098988251
0.713043382375
0.712987941353
0.712932664673
0.712877551827
0.712822602307
0.712767815606
0.712713191218
0.71265872864
0.712604427371
0.712550286909
0.712496306756
0.712442486413
0.712388825385
0.712335323177
0.712281979297
0.712228793251
0.712175764551
0.712122892708
0.712070177234
0.712017617645
0.711965213455
0.711912964183
0.711860869348
0.711808928469
0.711757141068
0.71170550667
0.711654024799
0.711602694982
0.711551516745
0.711500489619
0.711449613135
0.711398886824
0.711348310221
0.71129788286
0.711247604278
0.711197474014
0.711147491607
0.711097656598
0.711047968529
0.710998426945
0.710949031391
0.710899781413
0.71085067656
0.710801716382
0.710752900429
0.710704228255
0.710655699412
0.710607313457
0.710559069946
0.710510968436
0.710463008489
0.710415189664
0.710367511524
0.710319973633
0.710272575555
0.710225316858
0.710178197108
0.710131215876
0.710084372731
0.710037667246
0.709991098994
0.709944667549
0.709898372488
0.709852213388
0.709806189827
0.709760301386
0.709714547646
0.709668928188
0.709623442599
0.709578090462
0.709532871364
0.709487784894
0.70944283064
0.709398008193
0.709353317144
0.709308757088
0.709264327618
0.70922002833
0.709175858821
0.709131818689
0.709087907534
0.709044124956
0.709000470558
0.708956943943
0.708913544715
0.708870272481
0.708827126847
0.708784107422
0.708741213815
0.708698445638
0.708655802501
0.708613284019
0.708570889806
0.708528619478
0.708486472651
0.708444448944
0.708402547976
0.708360769368
0.708319112742
0.70827757772
0.708236163926
0.708194870987
0.708153698528
0.708112646178
0.708071713564
0.708030900319
0.707990206071
0.707949630455
0.707909173104
0.707868833652
0.707828611735
0.707788506991
0.707748519058
0.707708647574
0.707668892182
0.707629252521
0.707589728236
0.707550318969
0.707511024367
0.707471844075
0.70743277774
0.707393825012
0.707354985539
0.707316258972
0.707277644964
0.707239143166
0.707200753233
0.70716247482
0.707124307583
0.70708625118

In [7]:
print "Values of theta:- ",theta
prod = np.dot(X,theta.transpose())
predicted = sigmoid(prod)
print "The predicted values are as follows:- "
print predicted


Values of theta:-  [-0.19637577  0.34728744  0.08492231]
The predicted values are as follows:- 
[ 0.47024442  0.45749851  0.4472453   0.42948285  0.41700563  0.41076469
  0.4178289   0.42093645  0.44400998  0.45160057  0.47239614  0.48580625
  0.50120165  0.51094393  0.50871861  0.49123253  0.4768137   0.46425133
  0.45086318  0.42340826  0.41195006  0.40461726  0.40078586  0.40956438
  0.43283519  0.45570463  0.48000082  0.49799527  0.45060801  0.44216923
  0.44847319  0.45400739  0.44978754  0.43673218  0.4265109   0.42597146
  0.42634367  0.44169474  0.45303549  0.46219603  0.47153548  0.48894162
  0.50387527  0.49113058  0.50873633  0.50656372  0.52327526  0.48149692
  0.50359676  0.49355992  0.47509319  0.45893766  0.45472652  0.44068988
  0.43686839  0.41819799  0.43191979  0.43346173  0.48668191  0.4869233
  0.49000856  0.51095931  0.51983166  0.51604285  0.52800498  0.53900966
  0.52819512  0.53613015  0.53259377  0.52410262  0.52446404  0.51706191
  0.51180686  0.50803096  0.48665078  0.48325656  0.48248045  0.46909654
  0.45960832  0.44258069  0.45170313  0.430297    0.40768094  0.41228603
  0.38306644  0.39290962  0.39317257  0.40601358  0.43129209  0.43768143
  0.42657706  0.41906952  0.47643494  0.47538518  0.46298176  0.46744302
  0.49893977  0.50285514  0.52523529  0.53638599  0.52296536  0.54588041
  0.43491941  0.41756191  0.430499    0.40317395  0.40854631  0.42512926
  0.39785657  0.39059236  0.39169397  0.38801752  0.38742672  0.40109925
  0.41081434  0.4305129   0.4716016   0.50518381]