In [1]:
%run basics
%matplotlib
In [2]:
def residuals_RHLRC_NoD(params,NEE,Fsd,weighted,weight_type):
    # residuals of the rectangular hyperbolic LRC (no D function), optionally
    # multiplied by robust weights for the IRLS fit
    alpha = params[0]
    beta = params[1]
    gamma = params[2]
    resid = NEE - NEE_RHLRC_NoD(Fsd,alpha,beta,gamma)
    if weighted:
        weights = CalculateWeights(resid,weight_type=weight_type)
        return resid*weights
    else:
        return resid

def NEE_RHLRC_NoD(Fsd,alpha,beta,gamma):
    # NEE as -GPP plus a constant respiration term, gamma
    NEE = -1*GPP_RHLRC_NoD(Fsd,alpha,beta) + gamma
    return NEE

def GPP_RHLRC_NoD(Fsd,alpha,beta):
    # rectangular hyperbolic light response: alpha is the initial slope,
    # beta the GPP at saturating Fsd
    GPP = alpha*beta*Fsd/(alpha*Fsd+beta)
    return GPP
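In [ ]:
# quick sanity check of the light response curve above, using illustrative
# parameter values (alpha=0.1, beta=20, gamma=2) rather than fitted ones:
# NEE equals gamma at Fsd=0 and approaches gamma-beta at high Fsd
Fsd_check = numpy.array([0.0,100.0,500.0,1000.0])
print NEE_RHLRC_NoD(Fsd_check,0.1,20.0,2.0)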
In [3]:
def residuals_RHLRC_D(params,NEE,Fsd,D,T,D0,E0,weighted,weight_type):
    # residuals of the rectangular hyperbolic LRC with the VPD (D) function,
    # optionally multiplied by robust weights for the IRLS fit
    alpha = params[0]
    beta = params[1]
    k = params[2]
    rb = params[3]
    resid = NEE - NEE_RHLRC_D(Fsd,D,T,alpha,beta,k,D0,rb,E0)
    if weighted:
        weights = CalculateWeights(resid,weight_type=weight_type)
        return resid*weights
    else:
        return resid

def NEE_RHLRC_D(Fsd,D,T,alpha,beta,k,D0,rb,E0):
    # NEE as -GPP plus Lloyd & Taylor respiration
    NEE = -1*GPP_RHLRC_D(Fsd,D,alpha,beta,k,D0) + ER_LloydTaylor(T,rb,E0)
    return NEE

def GPP_RHLRC_D(Fsd,D,alpha,beta,k,D0):
    # rectangular hyperbolic light response with beta reduced at high VPD
    beta = beta*SHD_func_Lasslop(D,k,D0)
    GPP = alpha*beta*Fsd/(alpha*Fsd+beta)
    return GPP

def ER_LloydTaylor(T,rb,E0):
    # Lloyd & Taylor (1994) respiration function with Tref = 288.13 K and
    # T0 = 227.13 K; T is air temperature in degC
    # return rb*numpy.exp(E0*(1/(c.Tref-c.T0)-1/(T-c.T0)))
    return rb*numpy.exp(E0*(1/(288.13-227.13)-1/((T+273.15)-227.13)))

def SHD_func_Lasslop(D,k,D0):
    # exponential reduction of beta for D > D0 (Lasslop et al. 2010),
    # no reduction for D <= D0
    SHD_func = numpy.ones(len(D))
    idx = numpy.where(D>D0)[0]
    SHD_func[idx] = numpy.exp(-k*(D[idx]-D0))
    return SHD_func
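In [ ]:
# quick sanity check of the D function and respiration term above, using
# illustrative values (k=0.2, D0=1.0, rb=2.0, E0=153) rather than fitted ones:
# beta is unchanged for D<=D0 and decays exponentially for D>D0, and ER at an
# air temperature of 15 degC is close to rb
D_check = numpy.array([0.5,1.0,2.0,4.0])
print SHD_func_Lasslop(D_check,0.2,1.0)
print ER_LloydTaylor(numpy.array([15.0]),2.0,153.0)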
In [4]:
def irls_leastsq(func,p0,args=(),maxfev=3,weight_type='Huber',mode="quiet"):
    # iteratively re-weighted least squares: an unweighted call to leastsq
    # followed by maxfev weighted calls, the weights being recalculated from
    # the residuals at each iteration
    weighted = False
    # note: with the default full_output=0, the second value returned by
    # leastsq is an integer status flag, not the covariance matrix
    popt,pcov = scipy.optimize.leastsq(func,p0,args=args+(weighted,weight_type))
    if mode!="quiet":
        print 'After non-weighted call to leastsq: ',popt
    n = 1
    weighted = True
    while n<=maxfev:
        popt,pcov = scipy.optimize.leastsq(func,popt,args=args+(weighted,weight_type))
        if mode!="quiet":
            print 'Weighted call '+str(n)+' to leastsq: ',popt
        n = n + 1
    return popt,pcov

def CalculateWeights(resid,weight_type='Huber'):
    # robust weights calculated from the MAD-scaled residuals
    if weight_type.lower()=='tukey':
        weights = TukeyBiweight(MAD_scale(resid))
    elif weight_type.lower()=='huber':
        weights = HuberTWeight(MAD_scale(resid))
    elif weight_type.lower()=='ols':
        weights = OLSWeight(MAD_scale(resid))
    else:
        print "CalculateWeights: Unknown weight type, used Huber's t weight"
        weights = HuberTWeight(MAD_scale(resid))
    return weights

def TukeyBiweight(x):
    # Tukey's biweight: zero weight for |x| >= 1
    return ((numpy.abs(x)<1.0)*(1.0 - x**2)**2)

def HuberTWeight(x):
    # Huber's t weight with tuning constant 1.345
    weights = numpy.ones(len(x))
    idx = numpy.where(numpy.abs(x)>1.345)[0]
    weights[idx] = 1.345/numpy.abs(x[idx])
    return weights

def OLSWeight(x):
    # ordinary least squares, all weights equal to 1
    weights = numpy.ones(len(x))
    return weights

def MAD_scale(resid):
    # scale the residuals by the median absolute deviation; the factor 0.6745
    # makes the MAD a consistent estimator of the standard deviation for
    # normally distributed residuals
    scale = numpy.median(numpy.abs(resid - numpy.median(resid))) / 0.6745
    return resid/scale
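In [ ]:
# quick demonstration of the robust weights above on illustrative residuals:
# the outlier at +50 gets a weight well below 1 from Huber's t weight and a
# weight of 0 from Tukey's biweight, while the typical residuals keep weights
# at or near 1 (Huber) or between 0 and 1 (Tukey)
resid_check = numpy.array([-1.0,0.5,-0.2,0.8,50.0])
print CalculateWeights(resid_check,weight_type='Huber')
print CalculateWeights(resid_check,weight_type='Tukey')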
In [45]:
# get the netCDF file name
ncname = "../Sites/Calperum/Data/Portal/Calperum_L3.nc"
# read the contents into a data structure
ds = qcio.nc_read_series(ncname)
# get the datetime series
dt = ds.series["DateTime"]["Data"]
In [46]:
# open an Excel workbook for the results
xlname = ncname.replace(".nc","_LRC.xls")
xlfile = qcio.xl_open_write(xlname)
In [47]:
# rectangular hyperbolic LRC, no D function
# set the downwelling shortwave radiation and friction velocity thresholds
Fsd_threshold = 10
ustar_threshold = 0.24
# add a sheet to the Excel workbook
xlsheet = xlfile.add_sheet("RHLRC_NoD")
# get the index of the first period at the start of the first whole month
si = qcutils.GetDateIndex(dt,str(dt[0]),match="startnextmonth")
# get the start datetime
start_date = dt[si]
# create a dictionary for the results
mta = numpy.array([])
RHLRC_NoD = {"DateTime":{"data":mta,"units":"-","format":"dd/mm/yyyy hh:mm"},
"alpha":{"data":mta,"units":"-","format":"0.0000"},
"beta":{"data":mta,"units":"-","format":"0.00"},
"gamma":{"data":mta,"units":"-","format":"0.00"},
"ER":{"data":mta,"units":"-","format":"0.00"}}
ER = RHLRC_NoD["ER"]["data"]
# loop over periods until done
while start_date<dt[-1]:
    # get the end datetime for this pass
    end_date = start_date+dateutil.relativedelta.relativedelta(months=1)
    # indices of start and end datetimes
    si = qcutils.GetDateIndex(dt,str(start_date),default=0)
    ei = qcutils.GetDateIndex(dt,str(end_date),default=len(dt))
    # minimum number of points is 10%
    min_points = int(0.1*(ei-si))
    mi = int((si+ei)/2)
    # get the downwelling shortwave, CO2 flux and friction velocity
    Fsd,f,a = qcutils.GetSeriesasMA(ds,"Fsd",si=si,ei=ei)
    Fc,f,a = qcutils.GetSeriesasMA(ds,"Fc",si=si,ei=ei)
    ustar,f,a = qcutils.GetSeriesasMA(ds,"ustar",si=si,ei=ei)
    # mask out the night-time conditions
    Fc_day = numpy.ma.masked_where(Fsd<Fsd_threshold,Fc)
    # impose the same mask on Fsd and Fc
    mask = numpy.ma.mask_or(numpy.ma.getmaskarray(Fsd),
                            numpy.ma.getmaskarray(Fc_day),
                            copy=True)
    # remove masked elements
    NEE_day = numpy.ma.compressed(numpy.ma.masked_where(mask,Fc_day))
    Fsd_day = numpy.ma.compressed(numpy.ma.masked_where(mask,Fsd))
    # add the mid-period datetime to the results dictionary
    RHLRC_NoD["DateTime"]["data"] = numpy.append(RHLRC_NoD["DateTime"]["data"],dt[mi])
    # call the optimisation if more than the minimum number of
    # points available, otherwise set results to missing value
    if len(Fsd_day)>=min_points:
        popt,pcov = irls_leastsq(residuals_RHLRC_NoD,[0.1,100,1],
                                 args=(NEE_day,Fsd_day),maxfev=3,
                                 weight_type='Huber')
        RHLRC_NoD["alpha"]["data"] = numpy.append(RHLRC_NoD["alpha"]["data"],popt[0])
        RHLRC_NoD["beta"]["data"] = numpy.append(RHLRC_NoD["beta"]["data"],popt[1])
        RHLRC_NoD["gamma"]["data"] = numpy.append(RHLRC_NoD["gamma"]["data"],popt[2])
    else:
        RHLRC_NoD["alpha"]["data"] = numpy.append(RHLRC_NoD["alpha"]["data"],-9999)
        RHLRC_NoD["beta"]["data"] = numpy.append(RHLRC_NoD["beta"]["data"],-9999)
        RHLRC_NoD["gamma"]["data"] = numpy.append(RHLRC_NoD["gamma"]["data"],-9999)
    # plot the data
    #Fsd_fit = numpy.arange(0,numpy.ma.max(Fsd_day),1)
    #NEE_fit = NEE_RHLRC_NoD(Fsd_fit,popt[0],popt[1],popt[2])
    #fig=plt.figure()
    #plt.plot(Fsd_day,NEE_day,'b.')
    #plt.plot(Fsd_fit,NEE_fit,'r-')
    #plt.show()
    # get the average of the u*-filtered, nocturnal Fc
    Fc_night = numpy.ma.masked_where(Fsd>=Fsd_threshold,Fc)
    NEE_night = numpy.ma.masked_where(ustar<=ustar_threshold,Fc_night)
    if numpy.ma.count(NEE_night)>=min_points:
        RHLRC_NoD["ER"]["data"] = numpy.append(RHLRC_NoD["ER"]["data"],numpy.mean(NEE_night))
    else:
        RHLRC_NoD["ER"]["data"] = numpy.append(RHLRC_NoD["ER"]["data"],-9999)
    start_date = end_date
# write the results to the Excel workbook
qcio.xl_write_data(xlsheet,RHLRC_NoD,xlCol=0)
In [48]:
# plot a time series of gamma and observed ER
ER_modelled = numpy.ma.array(RHLRC_NoD["gamma"]["data"])
ER_modelled = numpy.ma.masked_where(ER_modelled==-9999,ER_modelled)
ER_observed = numpy.ma.array(RHLRC_NoD["ER"]["data"])
ER_observed = numpy.ma.masked_where(ER_observed==-9999,ER_observed)
fig = plt.figure()
plt.plot(RHLRC_NoD["DateTime"]["data"],ER_modelled,'bo-')
plt.plot(RHLRC_NoD["DateTime"]["data"],ER_observed,'ro-')
plt.show()
In [49]:
# rectangular hyperbolic LRC, with D function
# set the downwelling shortwave radiation and friction velocity thresholds
Fsd_threshold = 10
ustar_threshold = 0.24
D0 = 1.0
E0 = 153
# add a sheet to the Excel workbook
xlsheet = xlfile.add_sheet("RHLRC_D")
# get the index of the first period at the start of the first whole month
si = qcutils.GetDateIndex(dt,str(dt[0]),match="startnextmonth")
# get the start datetime
start_date = dt[si]
# create a dictionary for the results
mta = numpy.array([])
RHLRC_D = {"DateTime":{"data":mta,"units":"-","format":"dd/mm/yyyy hh:mm"},
"alpha":{"data":mta,"units":"-","format":"0.0000"},
"beta":{"data":mta,"units":"-","format":"0.00"},
"k":{"data":mta,"units":"-","format":"0.000"},
"rb":{"data":mta,"units":"-","format":"0.00"},
"ER":{"data":mta,"units":"-","format":"0.00"}}
# loop over periods until done
while start_date<dt[-1]:
    # get the end datetime for this pass
    end_date = start_date+dateutil.relativedelta.relativedelta(months=1)
    # indices of start and end datetimes
    si = qcutils.GetDateIndex(dt,str(start_date),default=0)
    ei = qcutils.GetDateIndex(dt,str(end_date),default=len(dt))
    # minimum number of points is 10%
    min_points = int(0.1*(ei-si))
    mi = int((si+ei)/2)
    # get the downwelling shortwave, CO2 flux, friction velocity, VPD and air temperature
    Fsd,f,a = qcutils.GetSeriesasMA(ds,"Fsd",si=si,ei=ei)
    NEE,f,a = qcutils.GetSeriesasMA(ds,"Fc",si=si,ei=ei)
    ustar,f,a = qcutils.GetSeriesasMA(ds,"ustar",si=si,ei=ei)
    VPD,f,a = qcutils.GetSeriesasMA(ds,"VPD",si=si,ei=ei)
    T,f,a = qcutils.GetSeriesasMA(ds,"Ta",si=si,ei=ei)
    # mask out the night-time conditions
    Fsd_day = numpy.ma.masked_where(Fsd<Fsd_threshold,Fsd)
    VPD_day = numpy.ma.masked_where(Fsd<Fsd_threshold,VPD)
    T_day = numpy.ma.masked_where(Fsd<Fsd_threshold,T)
    NEE_day = numpy.ma.masked_where(Fsd<Fsd_threshold,NEE)
    # get the mask across all data (one masked ==> all masked)
    mask = numpy.ma.getmaskarray(Fsd_day)
    for item in [VPD_day,T_day,NEE_day]:
        mask = numpy.ma.mask_or(mask,numpy.ma.getmaskarray(item),copy=True)
    # remove masked elements
    Fsd_day = numpy.ma.compressed(numpy.ma.masked_where(mask,Fsd_day))
    VPD_day = numpy.ma.compressed(numpy.ma.masked_where(mask,VPD_day))
    T_day = numpy.ma.compressed(numpy.ma.masked_where(mask,T_day))
    NEE_day = numpy.ma.compressed(numpy.ma.masked_where(mask,NEE_day))
    # mask out the day-time conditions
    NEE_night = numpy.ma.masked_where(Fsd>=Fsd_threshold,NEE)
    T_night = numpy.ma.masked_where(Fsd>=Fsd_threshold,T)
    # get the mask across all data (one masked ==> all masked)
    mask = numpy.ma.mask_or(numpy.ma.getmaskarray(NEE_night),
                            numpy.ma.getmaskarray(T_night),
                            copy=True)
    # apply the combined mask; NEE_night is kept at full length so that the
    # u* filter can be applied below
    NEE_night = numpy.ma.masked_where(mask,NEE_night)
    T_night = numpy.ma.masked_where(mask,T_night)
    # add the mid-period datetime to the results dictionary
    RHLRC_D["DateTime"]["data"] = numpy.append(RHLRC_D["DateTime"]["data"],dt[mi])
    # call the optimisation if more than the minimum number of
    # points available, otherwise set results to missing value
    # set the priors
    rb_prior = numpy.ma.mean(NEE_night)
    beta_prior = numpy.abs(numpy.percentile(NEE_day,5)-numpy.percentile(NEE_day,95))
    #print rb_prior,beta_prior
    p0 = [0.01,20,0,2]
    if len(Fsd_day)>=min_points:
        popt,pcov = irls_leastsq(residuals_RHLRC_D,p0,
                                 args=(NEE_day,Fsd_day,VPD_day,T_day,D0,E0),
                                 maxfev=3,weight_type='Huber')
    else:
        popt = [-9999,-9999,-9999,-9999]
    RHLRC_D["alpha"]["data"] = numpy.append(RHLRC_D["alpha"]["data"],popt[0])
    RHLRC_D["beta"]["data"] = numpy.append(RHLRC_D["beta"]["data"],popt[1])
    RHLRC_D["k"]["data"] = numpy.append(RHLRC_D["k"]["data"],popt[2])
    RHLRC_D["rb"]["data"] = numpy.append(RHLRC_D["rb"]["data"],popt[3])
    # plot the data
    #if popt[0]!=-9999:
    #    NEE_fit = NEE_RHLRC_D(Fsd_day,VPD_day,T_day,popt[0],popt[1],popt[2],D0,popt[3],E0)
    #    fig=plt.figure()
    #    plt.plot(Fsd_day,NEE_day,'b.')
    #    plt.plot(Fsd_day,NEE_fit,'r+')
    #    plt.show()
    # get the average of the u*-filtered, nocturnal Fc
    NEE_night = numpy.ma.masked_where(ustar<=ustar_threshold,NEE_night)
    NEE_night = numpy.ma.compressed(NEE_night)
    if len(NEE_night)>=min_points:
        RHLRC_D["ER"]["data"] = numpy.append(RHLRC_D["ER"]["data"],numpy.mean(NEE_night))
    else:
        RHLRC_D["ER"]["data"] = numpy.append(RHLRC_D["ER"]["data"],-9999)
    start_date = end_date
# write the results to the Excel workbook
qcio.xl_write_data(xlsheet,RHLRC_D,xlCol=0)
In [50]:
xlfile.save(xlname)
In [ ]: