In [ ]:
%matplotlib inline
import matplotlib.pyplot as plt
import numpy as np
from scipy import stats
In [ ]:
# Noise-free linear data: linregress recovers slope=2, intercept=1 exactly,
# so r-squared comes out as 1.0.
x = np.arange(1, 10, 1)
y = 2. * x + 1.
plt.plot(x, y, ".")
result = stats.linregress(x, y)
slope, intercept, r_value, p_value, std_err = result
print(slope, intercept, r_value, p_value, std_err)
print("r-squared:", r_value**2)
In [ ]:
# Same linear relation, now with unit gaussian noise on both coordinates;
# the fitted line (red) is overlaid on the scattered points.
x = np.arange(1., 10., 1.)
x = x + np.random.normal(0, 1, len(x))
y = 2. * x + 1.
y = y + np.random.normal(0, 1, len(x))
slope, intercept, r_value, p_value, std_err = stats.linregress(x, y)
print(slope, intercept, r_value, p_value, std_err)
print("r-squared:", r_value**2)
plt.plot(x, y, ".")
xlo, xhi = x.min(), x.max()
plt.plot([xlo, xhi], [slope * xlo + intercept, slope * xhi + intercept], "-r")
In [ ]:
# Uncorrelated uniform-random data: the fitted slope is meaningless and
# r-squared is expected to be close to zero.
x = np.random.random(10)
y = np.random.random(10)
slope, intercept, r_value, p_value, std_err = stats.linregress(x, y)
# Fix: the original plotted the identical points twice in a row; once suffices.
plt.plot(x, y, ".")
print(slope, intercept, r_value, p_value, std_err)
print("r-squared:", r_value**2)
In [ ]:
# Linear regression example: fit the same noisy line with np.polyfit and
# with stats.linregress and compare the recovered parameters.
# Fix: `from scipy import linspace, polyval, polyfit, sqrt, randn` relied on
# NumPy re-exports that were deprecated and removed from the SciPy namespace;
# `from pylab import ...` is likewise discouraged. Use numpy (np) and
# matplotlib.pyplot (plt) directly.

# Sample data creation
n = 50  # number of points
t = np.linspace(-5, 5, n)
# true parameters of the line x = a*t + b
a = 0.8
b = -4
x = np.polyval([a, b], t)
# add some unit gaussian noise
xn = x + np.random.randn(n)

# Linear regression via polyfit (polyfit also handles higher-order polynomials)
ar, br = np.polyfit(t, xn, 1)
xr = np.polyval([ar, br], t)
# root-mean-square error of the fitted line against the noisy samples
err = np.sqrt(np.sum((xr - xn) ** 2) / n)
print('Linear regression using polyfit')
print('parameters: a=%.2f b=%.2f \nregression: a=%.2f b=%.2f, ms error= %.3f' % (a, b, ar, br, err))

# matplotlib plotting
plt.title('Linear Regression Example')
plt.plot(t, x, 'g.--')
plt.plot(t, xn, 'k.')
plt.plot(t, xr, 'r.-')
plt.legend(['original', 'plus noise', 'regression'])
plt.show()

# Linear regression using stats.linregress
a_s, b_s, r, tt, stderr = stats.linregress(t, xn)
print('Linear regression using stats.linregress')
print('parameters: a=%.2f b=%.2f \nregression: a=%.2f b=%.2f, std error= %.3f' % (a, b, a_s, b_s, stderr))