In [2]:
%matplotlib inline
import matplotlib.pyplot as plt
import numpy as np
import scipy.optimize as opt
For this problem we are going to work with the following model:
$$ y_{model}(x) = a x^2 + b x + c $$

The true values of the model parameters are as follows:
In [3]:
a_true = 0.5
b_true = 2.0
c_true = -4.0
First, generate a dataset using this model and these parameters, with the following characteristics:

- 30 evenly spaced x values between -5 and 5 (np.linspace)
- Gaussian noise added to y, with mean 0 and standard deviation 2.0 (use the size argument of np.random.normal)

After you generate the data, make a plot of the raw data (use points).
In [14]:
x = np.linspace(-5, 5, 30)              # 30 evenly spaced points on [-5, 5]
n = np.random.normal(0, 2.0, size=30)   # Gaussian noise: mean 0, sigma 2.0
y = a_true*x**2 + b_true*x + c_true     # evaluate the true quadratic model
y = y + n                               # add the noise to the clean signal
plt.plot(x, y, ls='None', marker='.')   # raw data as points
Out[14]:
In [ ]:
assert True # leave this cell for grading the raw data generation and plot
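One optional refinement, not part of the original cells: seeding NumPy's random number generator before drawing the noise makes the dataset (and therefore the fit results below) reproducible across runs. A minimal sketch, assuming the same noise parameters as above:

In [ ]:
np.random.seed(42)                      # any fixed integer works; 42 is arbitrary
n = np.random.normal(0, 2.0, size=30)   # now the same draw on every run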
Now fit the model to the dataset to recover estimates for the model's parameters:
In [15]:
def model(x, a, b, c):
    """Quadratic model: a*x**2 + b*x + c."""
    return a*x*x + b*x + c
In [16]:
best, cov = opt.curve_fit(model, x, y)  # best-fit parameters and covariance matrix
# 1-sigma errors are the square roots of the covariance diagonal
print('a = {0:.3f} +/- {1:.3f}'.format(best[0], np.sqrt(cov[0,0])))
print('b = {0:.3f} +/- {1:.3f}'.format(best[1], np.sqrt(cov[1,1])))
print('c = {0:.3f} +/- {1:.3f}'.format(best[2], np.sqrt(cov[2,2])))
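Because the true parameter values are known here, one quick sanity check (an addition, not in the original notebook) is to compare each estimate to the truth in units of its 1-sigma error; pulls within roughly +/-2 indicate the fit recovered the parameters consistently with the noise level. A sketch using the variables defined above:

In [ ]:
errs = np.sqrt(np.diag(cov))            # 1-sigma uncertainties from the covariance
for name, est, err, true in zip('abc', best, errs, (a_true, b_true, c_true)):
    print('{0}: pull = {1:.2f}'.format(name, (est - true) / err))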
In [ ]:
assert True # leave this cell for grading the fit; should include a plot and printout of the parameters+errors
In [19]:
x_new = np.linspace(-5, 5, 30)
y_new = model(x_new, best[0], best[1], best[2])  # evaluate the fit on the grid
plt.plot(x_new, y_new)                           # best-fit curve
plt.plot(x, y, ls='None', marker='.')            # raw data as points
Out[19]:
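As a further visual check (again an addition, not in the original cells), plotting the residuals y - model(x, *best) should show scatter around zero with no remaining curvature if the quadratic model is adequate:

In [ ]:
resid = y - model(x, *best)               # data minus best-fit model
plt.plot(x, resid, ls='None', marker='.')
plt.axhline(0.0, color='k', lw=1)         # zero line for reference
plt.xlabel('x')
plt.ylabel('residual')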
In [ ]: