# Linear Regression - Part 1

We fit the linear model $y = m x + c$ to synthetic data.

## Imports

``````

In [ ]:

import numpy as np
import matplotlib.pyplot as plt
from sklearn import linear_model

%matplotlib inline

``````

## Data

``````

In [ ]:

# Generate synthetic data for y = m*x + c plus additive noise.
data_points = 100
X = np.arange(-20, data_points - 20).reshape(-1, 1)  # one column, many rows
m = np.random.rand() * 10  # true slope, uniform in [0, 10)
c = np.random.rand() * 10  # true intercept, uniform in [0, 10)
# Per-sample noise. The original used np.random.rand() * 5 — a single scalar —
# which adds NO noise: it merely shifts the intercept, so the fit is exact and
# intercept_ cannot recover c. Drawing one noise value per row fixes that.
Y = m * X + c + np.random.rand(data_points, 1) * 5

``````
``````

In [ ]:

# Peek at the first five input rows.
X[0:5]

``````
``````

In [ ]:

# Peek at the first five response rows.
Y[0:5]

``````

## Modelling

``````

In [ ]:

# Fit ordinary least squares to (X, Y); fit() returns the estimator itself,
# so constructing and fitting can be chained in one expression.
regr = linear_model.LinearRegression().fit(X, Y)

``````

## Evaluation

``````

In [ ]:

# `sklearn.metrics.regression` was a private module, deprecated in 0.22 and
# removed in scikit-learn 0.24 — the public import path is sklearn.metrics.
from sklearn.metrics import mean_squared_error

# Mean squared error of the fitted model on the training data.
mean_squared_error(Y, regr.predict(X))

``````
``````

In [ ]:

# Compare the fitted parameters against the true generating values.
# Note: intercept_ is the y-intercept (the `c` in y = m*x + c); the original
# label "X-Intercept" was wrong.
print('Predicted Linear Regression Coefficient:', regr.coef_, '\t Actual Value:', m)
print('Predicted Linear Regression Y-Intercept:', regr.intercept_, '\t Actual Value:', c)

``````

## Visualisation

``````

In [ ]:

# Plot the raw data and the model's predictions on one set of axes.
# The predicted line is shifted up by 5 so the two (nearly overlapping)
# lines stay visually distinguishable.
fig = plt.figure(figsize=(15, 5))
plt.plot(X, Y, alpha=0.4, color='b')
predictions = regr.predict(X)
plt.plot(X, predictions + 5, alpha=0.4, color='r')
plt.legend(['Actual Line', 'Predicted Line + 5 offset'])
plt.xlabel('X - Input values')
plt.ylabel('Y - Response values')

``````