In [65]:
from __future__ import division

import pandas as pd
import numpy as np
import seaborn as sns
import matplotlib.pyplot as plt

sns.set_style("whitegrid")
%matplotlib inline

In [66]:
df = pd.read_csv("ex2data1.txt",header=None)
df.columns = ['first','second','result']
df.head()


Out[66]:
first second result
0 34.623660 78.024693 0
1 30.286711 43.894998 0
2 35.847409 72.902198 0
3 60.182599 86.308552 1
4 79.032736 75.344376 1

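This appears to be the ex2data1.txt set from the logistic regression exercise, where the two columns are exam scores and the label marks admission. As a quick, hypothetical sanity check (not in the original notebook), one could look at the class balance before fitting:

# hypothetical check: how many examples fall in each class
df['result'].value_counts()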
In [67]:
# inserting a column of ones (the intercept term) at position 0
m = df.shape[0]
df.insert(0, 'ones', np.ones(m))

In [68]:
# renaming the columns
df.columns = ['first', 'second', 'third', 'result']
df.head()


Out[68]:
first second third result
0 1.0 34.623660 78.024693 0
1 1.0 30.286711 43.894998 0
2 1.0 35.847409 72.902198 0
3 1.0 60.182599 86.308552 1
4 1.0 79.032736 75.344376 1

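Folding the intercept into the design matrix this way lets theta[0] act as the bias in h = sigmoid(X·theta). A minimal alternative sketch in plain NumPy (not used in this notebook; `X_mat` is a name introduced here) would be:

# hypothetical alternative: build the design matrix in one shot with NumPy
# ('second' and 'third' hold the two raw features after the rename)
X_mat = np.column_stack([np.ones(m), df[['second', 'third']].values])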
In [69]:
# scatter plot of the two features, colored by class label
sns.lmplot(x='second', y='third', data=df, fit_reg=False, hue='result', size=7, scatter_kws={"s": 100})


Out[69]:
<seaborn.axisgrid.FacetGrid at 0x373b7a5358>

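The same picture can be drawn with plain matplotlib if seaborn's lmplot is not available; a rough sketch, using the column names above, is:

# hypothetical matplotlib equivalent of the seaborn scatter plot
admitted = df[df['result'] == 1]
rejected = df[df['result'] == 0]
plt.figure(figsize=(7, 7))
plt.scatter(admitted['second'], admitted['third'], s=100, label='result = 1')
plt.scatter(rejected['second'], rejected['third'], s=100, label='result = 0')
plt.xlabel('second')
plt.ylabel('third')
plt.legend()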
In [162]:
# some initializations for gradient descent
iterations = 100
theta = pd.Series([-24,0.2,0.2])
X = df[['first','second','third']]
y = df['result']
last_j = pd.Series(np.ones(m))
alpha = 0.001

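With unscaled exam scores, alpha = 0.001 and 100 iterations will only nudge theta; gradient descent on this data generally needs many more iterations or normalized features. A hedged sketch of mean/std scaling (not applied in this notebook; `X_scaled` is a name introduced here) would look like:

# hypothetical feature scaling: zero mean, unit variance for the two feature columns
X_scaled = X.copy()
for col in ['second', 'third']:
    X_scaled[col] = (X[col] - X[col].mean()) / X[col].std()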
In [183]:
# functions section
def sigmoid(x):
    # element-wise sigmoid (works on scalars and Series)
    return 1 / (1 + np.exp(-x))

def cost_function(theta, X, y):
    # hypothesis h = sigmoid(X . theta) for every training example
    h = pd.Series(np.dot(theta.T, X.T).T)

    # log(sigmoid(x)) for all hypothesis elements
    h1 = np.log(sigmoid(h))

    # log(1 - sigmoid(x)); the small offset avoids log(0) when sigmoid saturates to 1.0
    h2 = np.log(1.0000000001 - sigmoid(h))

    # cross-entropy cost of the hypothesis
    J = (-1 / m) * (y.T.dot(h1) + (1 - y).T.dot(h2))

    return J

def gradient_function(theta, X, y):
    # hypothesis vector
    h = pd.Series(np.dot(theta.T, X.T).T)
    h = sigmoid(h)

    # gradient of the cost with respect to theta
    grad = (1 / m) * ((h - y).T.dot(X).T)

    # reindexing to [0, 1, 2] so it lines up with theta
    grad.index = [0, 1, 2]

    return grad

def gradient_algo(theta, X, y):
    for n in range(iterations):

        # gradient of the cost at the current theta
        grad = gradient_function(theta, X, y)

        # gradient descent update
        theta = theta - alpha * grad

    return theta

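The original Octave exercise fits theta with fminunc rather than a fixed-step loop; in Python the analogous route would be scipy.optimize.minimize. A minimal sketch, assuming the cost and gradient functions above (the wrapping into pd.Series is only there to match their expected input):

# hypothetical use of scipy.optimize instead of the fixed-step loop above
from scipy.optimize import minimize

res = minimize(
    fun=lambda t: cost_function(pd.Series(t), X, y),
    x0=np.array([-24.0, 0.2, 0.2]),
    jac=lambda t: gradient_function(pd.Series(t), X, y).values,
    method='BFGS',
)
# res.x would then hold the fitted theta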
In [184]:
# gradient of the cost at the initial theta (one evaluation, not the full descent)
grad0 = gradient_function(theta, X, y)
grad0


Out[184]:
0    0.042903
1    2.566234
2    2.646797
dtype: float64

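Once gradient_algo (or an optimizer) has produced a final theta, predictions come from thresholding the hypothesis at 0.5. A hedged sketch of a prediction helper (`predict` and `theta_fit` are names introduced here, not in the notebook):

# hypothetical prediction helper: class 1 when sigmoid(x . theta) >= 0.5
def predict(theta, X):
    probs = sigmoid(pd.Series(np.dot(theta.T, X.T).T))
    return (probs >= 0.5).astype(int)

# training-set accuracy for some fitted theta_fit (illustrative only)
# accuracy = (predict(theta_fit, X) == y).mean()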
In [648]:
# sanity check: sigmoid saturates to 1.0 for a large positive input
sigmoid(1000000)


Out[648]:
1.0

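The result is exactly 1.0 because np.exp(-1000000) underflows to 0. For a large negative input, np.exp(-x) instead overflows: NumPy emits a RuntimeWarning and returns inf, so the sigmoid still comes out as a correct 0.0. If the warning is a concern, a warning-free variant is scipy's expit:

# hypothetical warning-free sigmoid using scipy
from scipy.special import expit

expit(1000000)    # 1.0
expit(-1000000)   # 0.0, no overflow warning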