In [2]:
import numpy as np
import pandas as pd
%pylab inline


Populating the interactive namespace from numpy and matplotlib

In [20]:
# Fixed seed: the analysis is stochastic, so results must be reproducible
# across kernel restarts.
np.random.seed(42)
# 1000 inputs drawn from N(0, 5^2), as a column vector.
X = np.random.normal(scale=5, size=(1000, 1))
# Quadratic ground truth 5 - (x-3)^2 plus N(0, 10^2) noise — deliberately
# NOT linear, so the linear fit below is a crude approximation.
y = 5 - (X - 3) * (X - 3) + np.random.normal(scale=10, size=(1000, 1))

In [21]:
class LinearRegression:
    """Simple 1-D linear model (y = k*x + b) fitted by batch gradient descent."""

    def __init__(self, lr=0.01, max_iter=1000000):
        # Parameters start at random values in [0, 1); lr is the
        # gradient-descent step size, max_iter caps the iteration count.
        self.k = np.random.random()
        self.b = np.random.random()
        self.lr = lr
        self.max_iter = max_iter

    def predict(self, x):
        """Return k*x + b; works element-wise on array input."""
        return x * self.k + self.b

    def fit(self, data, answers):
        """Fit k and b by gradient descent on mean squared error.

        data    : (N, 1) array of inputs
        answers : (N, 1) array of targets

        Stops when the parameter-update norm drops below 1e-4 or after
        max_iter iterations.
        """
        N = data.shape[0]
        step = 1.0
        it = 0
        while step > 1e-4 and it < self.max_iter:
            # Residuals of the current model, shape (N, 1).
            delta = self.k * data + self.b - answers
            # Gradients of the MSE w.r.t. k and b, reduced to Python floats.
            # (The original also computed the MSE itself every iteration but
            # never used it — dropped as dead work.)
            dk = (2 / N) * data.T.dot(delta).item()
            db = (2 / N) * float(np.sum(delta))
            self.k -= self.lr * dk
            self.b -= self.lr * db
            # Scalar size of the parameter update just applied; the original
            # kept this as a (1,1) ndarray, relying on size-1 array truthiness.
            step = self.lr * np.sqrt(dk * dk + db * db)
            it += 1

In [22]:
# Hand-rolled gradient-descent model; lr=0.001 is the learning rate.
# NOTE(review): the variable name `lr` collides with the learning-rate kwarg
# and is later rebound to a LogisticRegression — a more distinct name
# (e.g. `linreg`) would avoid hidden-state confusion.
lr = LinearRegression(lr = 0.001)

In [23]:
# Fit the line to the synthetic data; the targets are quadratic in X, so the
# linear fit is deliberately crude here.
lr.fit(X, y)

In [25]:
# Fitted line (red) over the raw points; `plot`/`scatter` come from the
# %pylab namespace. X is unsorted, but every plotted point lies exactly on
# the fitted line, so the segments still render as one straight line.
plot(X, X*lr.k+lr.b, c="red")
scatter(X, y, marker = "+", alpha = 0.3)


Out[25]:
<matplotlib.collections.PathCollection at 0x1d2f5e73f98>

In [ ]:


In [26]:
from sklearn.datasets import load_iris

In [29]:
# Load the bundled iris dataset (sklearn Bunch; `.data` and `.target` are
# used below).
data = load_iris()

In [70]:
# Binary classification target: class 1 (presumably versicolor — confirm
# against sklearn's iris docs) vs. the rest, as 0/1 ints.
# NOTE(review): this overwrites the X and y from the regression section —
# the notebook reuses the same names for unrelated data.
X, y = data.data, (data.target==1).astype(int)

In [71]:
# Feature columns 2 and 3 (presumably petal length/width — TODO confirm),
# colored by the binary label; `scatter` comes from %pylab.
scatter(X[:,2], X[:,3], c=y)


Out[71]:
<matplotlib.collections.PathCollection at 0x1d2faf3a5c0>

In [72]:
from sklearn.linear_model import LogisticRegression

In [73]:
# NOTE(review): rebinds `lr`, which previously held the hand-rolled
# LinearRegression — any later re-run of the earlier cells now breaks.
lr = LogisticRegression()

In [74]:
# Train the logistic model on all four iris features against the 0/1 label.
lr.fit(X, y)


Out[74]:
LogisticRegression(C=1.0, class_weight=None, dual=False, fit_intercept=True,
          intercept_scaling=1, max_iter=100, multi_class='ovr', n_jobs=1,
          penalty='l2', random_state=None, solver='liblinear', tol=0.0001,
          verbose=0, warm_start=False)

In [77]:
# The original cell was just the bare name `f`, which is undefined on a
# fresh kernel (the cell only "worked" via stale hidden state). The saved
# output is a length-150 boolean array, consistent with comparing the
# classifier's predictions to the labels — reconstructed as:
lr.predict(X) == y


Out[77]:
array([ True,  True,  True,  True,  True,  True,  True,  True,  True,
        True,  True,  True,  True,  True,  True,  True,  True,  True,
        True,  True,  True,  True,  True,  True,  True,  True,  True,
        True,  True,  True,  True,  True,  True,  True,  True,  True,
        True,  True,  True,  True,  True,  True,  True,  True,  True,
        True,  True,  True,  True,  True, False, False, False,  True,
       False, False, False, False,  True, False,  True, False,  True,
       False, False, False, False,  True,  True,  True, False, False,
        True,  True, False, False,  True, False, False, False,  True,
        True, False, False, False, False, False,  True, False, False,
        True, False, False,  True, False, False, False, False, False,
       False,  True,  True,  True,  True,  True, False,  True, False,
       False,  True,  True,  True,  True,  True,  True,  True,  True,
        True, False, False,  True,  True, False,  True,  True,  True,
        True,  True,  True, False, False,  True,  True, False, False,
        True,  True,  True,  True,  True,  True,  True,  True,  True,
        True,  True,  True,  True,  True,  True])

In [ ]: