In [2]:
import numpy as np
import scipy.stats as st
import sklearn.linear_model as lm
import matplotlib.pyplot as plt
%matplotlib inline

In [3]:
# ground-truth function that generates the data
f = lambda x: np.exp(3 * x)

In [5]:
# dense grid on [0, 2] used to plot the true curve and the model predictions
x_tr = np.linspace(0., 2, 200)
y_tr = f(x_tr)

In [6]:
# sample a few points and add standard-normal noise to simulate noisy observations
x = np.array([0, .1, .2, .5, .8, .9, 1])
y = f(x) + np.random.randn(len(x))
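
The noise comes from np.random.randn, so the exact y values (and the coefficients printed further down) change on every run. Seeding NumPy's generator before the sampling above would make the session reproducible; the seed value below is arbitrary.

In [ ]:
# optional: run before the sampling cell to fix the noise draw (seed value is arbitrary)
np.random.seed(42)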

In [9]:
plt.figure(figsize=(12, 6))
# plot the true curve on [0, 1] (first half of the grid) and the noisy samples
plt.plot(x_tr[:100], y_tr[:100], '--k')
plt.plot(x, y, 'ok', ms=10)


Out[9]:
[<matplotlib.lines.Line2D at 0x10d9a7898>]

In [12]:
# create the linear regression model
lr = lm.LinearRegression()
# train the model on the noisy samples (scikit-learn expects a 2D feature array)
lr.fit(x[:, np.newaxis], y)
# predict on the dense grid with the trained model
y_lr = lr.predict(x_tr[:, np.newaxis])

In [13]:
plt.figure(figsize=(12,6))
plt.plot(x_tr, y_tr, '--k')
plt.plot(x_tr, y_lr, 'g')
plt.plot(x, y, 'ok', ms=10)
plt.xlim(0,1)
plt.ylim(y.min() - 1, y.max() + 1)
plt.title('Linear Regression')


Out[13]:
<matplotlib.text.Text at 0x10e3612e8>
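
As a quick check on the fit, the slope and intercept of the regression line are exposed as the standard scikit-learn attributes coef_ and intercept_ on the trained model:

In [ ]:
# slope (single coefficient, since there is one feature) and intercept of the fitted line
print(lr.coef_, lr.intercept_)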

In [25]:
# ordinary least squares again, now fit on polynomial features
lrp = lm.LinearRegression()
plt.figure(figsize=(12,6))
plt.plot(x_tr, y_tr, '--k')

# fit polynomials of degree 2 and 5 using a Vandermonde (polynomial) feature matrix
for deg, s in zip([2, 5], ['-', '.']):
    print(deg)
    print(s)
    lrp.fit(np.vander(x, deg + 1), y)
    y_lrp = lrp.predict(np.vander(x_tr, deg + 1))
    plt.plot(x_tr, y_lrp, s, label='degree ' + str(deg))
    plt.legend(loc=2)
    plt.xlim(0, 1.4)
    plt.ylim(-10, 40)
    # print the model's coefficients
    print(' '.join(['%.2f' % c for c in lrp.coef_]))

plt.plot(x, y, 'ok', ms=10)
plt.title('Linear Regression')


2
-
23.28 -6.80 0.00
5
.
-191.73 488.98 -411.43 144.01 -12.46 0.00
Out[25]:
<matplotlib.text.Text at 0x10d7a4d68>
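
np.vander builds the polynomial design matrix used in the loop above: with deg + 1 columns, each row contains the decreasing powers x**deg, ..., x, 1 of the corresponding sample. A tiny example shows the layout:

In [ ]:
# columns are x**2, x**1, x**0 for the inputs 1., 2., 3.
np.vander(np.array([1., 2., 3.]), 3)
# -> array([[1., 1., 1.],
#           [4., 2., 1.],
#           [9., 3., 1.]])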

In [26]:
# ridge regression: linear regression with L2 regularization, alpha chosen by cross-validation
lrp = lm.RidgeCV()
plt.figure(figsize=(12,6))
plt.plot(x_tr, y_tr, '--k')

# same polynomial fits as above, but with the regularized model
for deg, s in zip([2, 5], ['-', '.']):
    print(deg)
    print(s)
    lrp.fit(np.vander(x, deg + 1), y)
    y_lrp = lrp.predict(np.vander(x_tr, deg + 1))
    plt.plot(x_tr, y_lrp, s, label='degree ' + str(deg))
    plt.legend(loc=2)
    plt.xlim(0, 1.4)
    plt.ylim(-10, 40)
    # print the model's coefficients
    print(' '.join(['%.2f' % c for c in lrp.coef_]))

plt.plot(x, y, 'ok', ms=10)
plt.title('Ridge regression')


2
-
11.03 4.96 0.00
5
.
3.43 3.58 3.68 3.63 3.05 0.00
Out[26]:
<matplotlib.text.Text at 0x11406aeb8>
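
RidgeCV selects the regularization strength by cross-validation over a small grid of candidate alphas (by default something like 0.1, 1.0 and 10.0) and stores the chosen value on the fitted estimator:

In [ ]:
# regularization strength chosen by RidgeCV during fit()
print(lrp.alpha_)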
