# Linear Regression using scikit-learn

``````

In [42]:

import numpy as np
from sklearn import linear_model

``````
``````

In [43]:

# Fit a linear one-step-ahead model on synthetic data:
# regress the next state x[t+1] on the current state x[t] and input u[t],
# i.e. x[t+1] ≈ A x[t] + B u[t] + c, estimated by ordinary least squares.

np.random.seed(0)  # seed so the notebook reproduces its outputs on Restart & Run All

k = 20  # number of samples (time steps)
n = 4   # state dimension
m = 2   # input dimension

x = np.random.randn(k + 1, n)  # state trajectory; one extra row supplies the final target
u = np.random.randn(k, m)      # exogenous inputs

# Targets are the shifted states; features stack [x[t], u[t]] column-wise.
Y = x[1:, :]
X = np.hstack((x[:-1, :], u))

regr = linear_model.LinearRegression()

# Last expression: the fitted estimator is the cell's rich display output.
regr.fit(X, Y)

``````
``````

Out[43]:

LinearRegression(copy_X=True, fit_intercept=True, n_jobs=1, normalize=False)

``````
``````

In [44]:

# Report the fitted parameters and the in-sample R^2 score.
for value in (regr.coef_, regr.intercept_, regr.score(X, Y)):
    print(value)

``````
``````

[[-0.10810856  0.16971065 -0.53428562  0.36952053  0.4285531  -0.94652769]
[-0.20494069 -0.49207163  0.41899599  0.19284523  0.34929779  0.30053433]
[-0.2932994  -0.26499607  0.21238407  0.01456724  0.37033315  0.13977884]
[-0.17850806 -0.11582589 -0.05792805  0.18524144  0.36433666  0.11425907]]
[-0.01036753  0.23140733  0.00551828 -0.37326779]
0.345467075136

``````
``````

In [52]:

# Sanity check: predict() should match the manual affine map X @ coef.T + intercept.
first_rows = X[:2, :]
print(regr.predict(first_rows))

print(first_rows @ regr.coef_.T + regr.intercept_)

``````
``````

[[-0.93520718  0.43953176 -0.00835891 -0.84171422]
[-1.12362099  0.86308369  0.80096951  0.12713858]]
[[-0.93520718  0.43953176 -0.00835891 -0.84171422]
[-1.12362099  0.86308369  0.80096951  0.12713858]]

``````