This is a test heading. Where do we go from here?

  • This is how we itemize.
  • One more time for completeness.

The calculus formula for the propagation of errors is really an approximation. This is the formula for a general $q(x,y)$: $$\sigma_q^2 = \left( \frac{\partial q}{\partial x} \sigma_x \right)^2 + \left( \frac{\partial q}{\partial y}\sigma_y \right)^2 $$


In [2]:
%matplotlib inline
import numpy as np
from scipy.optimize import minimize
import scipy.stats as stats
from scipy.stats import chi2
import time
import matplotlib.pylab as plt
import seaborn
outfilepath = '/Users/Jatan/Google Drive/PHYS2010/'   #modify this path to your plot directory

#setting up x values: 11 evenly spaced points on [0, 10]
x = np.linspace(0, 10, 11)

#input data (one measurement per x value)
data = [9.9078, 3.1797, 17.9771, 28.0620, 35.3188, 59.4874, 69.7478, 95.4985, 115.0069, 164.3853, 165.3513]

# Constant 1-sigma measurement errors, one per data point.
# The original built these with a per-element comprehension over np.ones
# (allocating a fresh length-n array for every element); a flat list of
# floats is equivalent and O(n).
err_std = [10.0] * len(data)    #used as the Gaussian scale in the likelihoods
err_large = [14.0] * len(data)  #inflated errors, used only for the plot's error bars

def neg_loglhood_lin_2d(params):
    """Negative Gaussian log-likelihood of ``data`` under the linear model y = m*x.

    ``params`` carries the single slope parameter m (a scalar or a
    1-element array, as supplied by scipy.optimize.minimize).  Reads the
    module-level ``x``, ``data`` and ``err_std``.  Returns a scalar to be
    minimized.
    """
    slope = params
    model = slope * x
    return -np.sum(stats.norm.logpdf(data, loc=model, scale=err_std))

def neg_loglhood_parabolic(params):
    """Negative Gaussian log-likelihood of ``data`` under the model y = a*x^2.

    ``params`` carries the single coefficient ``a`` (scalar or 1-element
    array, as supplied by scipy.optimize.minimize).  Reads the
    module-level ``x``, ``data`` and ``err_std``.  Returns a scalar to be
    minimized.
    """
    a = params

    ymod = a*pow(x, 2)
    # Local renamed from the copy-pasted "log_lik_plaw" — this is the
    # parabolic model's likelihood, not the power law's.
    log_lik_parabolic = -np.sum(stats.norm.logpdf(data, loc=ymod, scale=err_std) )
    return(log_lik_parabolic)

def neg_loglhood_plaw(params):
    """Negative Gaussian log-likelihood of ``data`` under the power law y = a*x^b.

    ``params`` is a length-2 sequence (amplitude a, exponent b), as
    supplied by scipy.optimize.minimize.  Reads the module-level ``x``,
    ``data`` and ``err_std``.  Returns a scalar to be minimized.
    """
    amplitude, exponent = params
    prediction = amplitude * pow(x, exponent)
    return -np.sum(stats.norm.logpdf(data, loc=prediction, scale=err_std))

#initial parameter guesses
init_params_2d = [1, 1]   # [a, b] for the two-parameter power law
init_params = [1,]        # single parameter for the linear and parabolic models

#minimize the log likelihood or equivalently maximize the likelihood
# Nelder-Mead is derivative-free, so no gradient of the likelihood is needed.
result_parabolic = minimize(neg_loglhood_parabolic, init_params, method='nelder-mead')
equation_parabolic = 'y =' + str(round(result_parabolic.x[0], 4)) + '*' + 'x^2' 

result_lin_2d = minimize(neg_loglhood_lin_2d, init_params, method='nelder-mead')
equation_lin_2d = 'y =' + str(round(result_lin_2d.x[0], 4)) + '*' + 'x'

result_plaw = minimize(neg_loglhood_plaw, init_params_2d, method='nelder-mead')
# Human-readable best-fit equation, rendered onto the plot below.
equation_plaw = 'y =' + str(round(result_plaw.x[0], 4)) + '*' + 'x^' + str(round(result_plaw.x[1], 4))

#print the results as a sanity check!
#print result_plaw.x

#plotting routine   #substitute _lin for _plaw to obtain plot for linear model
# NOTE(review): the plot shows the power-law fit; the trailing comments give
# the drop-in replacements for the linear and parabolic models.
fig, ax = plt.subplots(1,1)
plt.plot(x, result_plaw.x[0]*pow(x, result_plaw.x[1]), lw=2, color='black', label = 'best-fit') #result_lin_2d.x[0]*x #result_parabolic.x[0]*pow(x,2) #result_plaw.x[0]*pow(x, result_plaw.x[1]
# Error bars drawn with the inflated errors (err_large), not the err_std
# used in the fits.
plt.errorbar(x, data, yerr=err_large, color='red', fmt='o')
plt.xlim(-1, 11)
plt.suptitle("MLE: Maximum Likelihood Estimation (v3)")
# Place the best-fit equation at the top center, in axes coordinates.
ax.text(0.5, 0.9, equation_plaw, horizontalalignment='center', verticalalignment='center', transform=ax.transAxes) #equation_lin_2d #equation_parabolic 
plt.legend(loc='upper left', prop={'size':12}, frameon=False)
plt.savefig(outfilepath + 'powerlawfit.pdf')   #'linearfit.pdf' #'parabolicfit.pdf'



In [5]:
import pandas as pd

In [7]:
# Load the demo measurement table; assumes demo.csv is in the notebook's
# working directory.
mydata = pd.read_csv('demo.csv')
# Bare expression: displays the DataFrame as the cell's output.
mydata


Out[7]:
name time 1 time 2 time 3 dist 1 error dist 1 dist 2 error dist 2 area x len y len
0 name 7.38 7.39 7.74 0.325000 0.050 0.980000 0.0050 0.125000 0.250000 0.500000
1 name 7.63 7.45 7.68 0.300000 0.100 0.970000 0.0500 0.140000 0.280000 0.500000
2 name 7.55 7.44 7.70 0.300000 0.100 0.980000 0.0400 0.125000 0.250000 0.330000
3 name 7.73 7.68 7.76 0.320000 0.010 0.995000 0.0050 0.187500 0.375000 0.750000
4 name 7.11 7.48 7.71 0.358000 0.001 0.967800 0.0002 0.168000 0.340000 0.452000
5 name 7.50 7.30 7.60 0.300000 0.050 0.980000 0.0300 0.170000 0.300000 0.450000
6 name 7.53 7.65 7.53 0.300000 0.100 0.950000 0.0200 0.200000 0.300000 0.500000
7 name 6.88 7.54 7.67 0.300000 0.050 0.950000 0.0250 0.142857 0.333333 0.428571
8 name 7.23 7.48 7.71 0.320000 0.030 0.960000 0.0200 0.225000 0.330000 0.500000
9 name 7.50 9.27 7.55 0.370000 0.100 0.920000 0.1000 0.142857 0.300000 0.500000
10 name 7.67 7.36 7.81 0.350000 0.050 0.970000 0.0300 0.230000 0.350000 0.580000
11 name 7.38 7.62 7.56 0.270000 0.050 0.980000 0.0500 0.170000 0.300000 0.500000
12 name 7.95 7.53 7.58 0.300000 0.100 0.900000 0.1000 0.160000 0.330000 0.500000
13 name 7.97 7.52 7.67 0.330000 0.010 0.900000 0.0100 0.166700 0.333300 0.500000
14 name 7.34 7.68 7.86 0.330000 0.050 0.990000 0.0500 0.125000 0.250000 0.500000
15 name 7.72 7.53 7.75 0.300000 0.050 0.950000 0.0500 0.200000 0.250000 0.500000
16 name 7.09 7.55 7.71 0.300000 0.100 0.950000 0.0500 0.200000 0.330000 0.500000
17 name 7.58 7.35 7.60 0.280000 0.050 0.980000 0.0500 0.200000 0.300000 0.600000
18 name 7.63 7.36 7.63 0.300000 0.100 0.970000 0.1000 0.125000 0.250000 0.500000
19 name 7.05 7.60 7.73 0.333333 0.100 0.960000 0.0100 0.200000 0.250000 0.500000
20 name 7.25 7.48 7.60 0.300000 0.100 0.950000 0.5000 0.160000 0.280000 0.500000
21 name 7.57 7.55 7.66 0.300000 0.050 0.950000 0.0500 0.200000 0.240000 0.500000
22 name 7.55 7.50 7.81 0.300000 0.050 0.970000 0.0500 0.190000 0.300000 0.600000
23 name 6.58 7.55 7.77 0.380000 0.500 0.900000 0.1000 0.180000 0.300000 0.500000
24 name 7.58 7.51 8.25 0.268900 0.150 0.990000 0.1000 0.150000 0.300000 0.500000
25 name 7.45 7.40 7.80 0.300000 0.100 0.960000 0.0200 0.200000 0.240000 0.500000
26 name 7.43 7.20 7.66 0.300000 0.100 0.950000 0.1000 0.160000 0.300000 0.500000
27 name 7.59 6.91 7.72 0.320000 0.050 0.960000 0.0500 0.170000 0.250000 0.550000
28 name 7.47 5.25 7.47 0.345553 0.111 0.999875 0.0908 0.156000 0.072000 0.090000
29 name 7.58 7.51 7.55 0.300000 0.050 0.998000 0.0500 0.200000 0.330000 0.600000
30 name 7.36 7.43 7.63 0.300000 0.500 0.900000 0.5000 0.190000 0.330000 0.660000
31 name 6.78 7.21 7.68 0.300000 0.100 0.950000 0.0500 0.200000 0.333000 0.500000
32 name 6.78 7.77 7.45 0.254400 0.500 0.980000 0.5000 0.165000 0.330000 0.460000

In [ ]: