In [ ]:
%run basics
%matplotlib

In [ ]:
def residuals_RHLRC_D(params,NEE,Fsd,D,T,D0,E0,weighted,weight_type):
    """
    Residuals between observed NEE and the rectangular-hyperbolic light
    response curve with VPD damping (NEE_RHLRC_D).

    params is the parameter vector [alpha, beta, k, rb].  When weighted is
    True the residuals are multiplied by robust weights from
    CalculateWeights (weight_type selects the weight function); otherwise
    the raw residuals are returned.
    """
    alpha, beta, k, rb = params[0], params[1], params[2], params[3]
    modelled = NEE_RHLRC_D(Fsd, D, T, alpha, beta, k, D0, rb, E0)
    resid = NEE - modelled
    if not weighted:
        return resid
    return resid*CalculateWeights(resid, weight_type=weight_type)

def NEE_RHLRC_D(Fsd,D,T,alpha,beta,k,D0,rb,E0):
    """
    Net ecosystem exchange as ecosystem respiration minus gross primary
    productivity: NEE = ER_LloydTaylor(T, rb, E0) - GPP_RHLRC_D(...).
    Sign convention: uptake (GPP) is negative.
    """
    gpp = GPP_RHLRC_D(Fsd, D, alpha, beta, k, D0)
    er = ER_LloydTaylor(T, rb, E0)
    return er - gpp

def GPP_RHLRC_D(Fsd,D,alpha,beta,k,D0):
    """
    Rectangular hyperbolic light response curve for GPP, with the plateau
    parameter beta damped at high VPD via SHD_func_Lasslop.
    alpha is the initial slope, Fsd the downwelling shortwave radiation.
    """
    beta_damped = beta*SHD_func_Lasslop(D, k, D0)
    return (alpha*Fsd*beta_damped)/(alpha*Fsd + beta_damped)

def ER_LloydTaylor(T,rb,E0):
    """
    Lloyd & Taylor style ecosystem respiration.

    T is air temperature in degC (converted to K internally), rb the base
    respiration rate at the reference temperature, E0 the temperature
    sensitivity.  The hard-coded constants are the reference temperature
    (288.13 K) and T0 (227.13 K); a commented-out variant using c.Tref/c.T0
    existed in the original.
    """
    Tref_minus_T0 = 288.13 - 227.13   # reference temperature minus T0, K
    Tk = T + 273.15                   # air temperature degC -> K
    return rb*numpy.exp(E0*(1/Tref_minus_T0 - 1/(Tk - 227.13)))

def SHD_func_Lasslop(D,k,D0):
    """
    VPD damping factor applied to beta (Lasslop et al. style): unity for
    D <= D0, exponential decay exp(-k*(D - D0)) above the threshold D0.
    D must be a 1-D array-like (len() is taken).
    """
    damping = numpy.ones(len(D))
    above = D > D0
    damping[above] = numpy.exp(-k*(D[above] - D0))
    return damping

In [ ]:
def irls_leastsq(func,p0,args=(),maxfev=3,weight_type='Huber',mode="quiet"):
    """
    Iteratively reweighted least squares wrapper around
    scipy.optimize.leastsq.

    func is called as func(p, *args, weighted, weight_type) and must return
    the (optionally robust-weighted) residual vector.  One unweighted fit
    provides the starting point, followed by maxfev weighted refinement
    passes.  Set mode to anything other than "quiet" to print the parameter
    vector after each pass.

    Returns (popt, ier); note the second value is leastsq's integer status
    flag (full_output is left at its default), not a covariance matrix.
    """
    # first pass: ordinary (unweighted) least squares for a starting point
    popt, ier = scipy.optimize.leastsq(func, p0, args=args+(False, weight_type))
    if mode != "quiet":
        print('After non-weighted call to leastsq: ', popt)
    # refinement passes: residuals are robust-weighted inside func
    for n in range(1, maxfev+1):
        popt, ier = scipy.optimize.leastsq(func, popt, args=args+(True, weight_type))
        if mode != "quiet":
            print('Weighted call '+str(n)+' to leastsq: ', popt)
    return popt, ier

def CalculateWeights(resid,weight_type='Huber'):
    """
    Map raw residuals to robust regression weights.

    resid is first scaled by its median absolute deviation (MAD_scale) and
    then passed through the requested weight function.  weight_type is one
    of 'Tukey', 'Huber' or 'OLS' (case-insensitive); any other value falls
    back to Huber's t weight with a warning.
    """
    scaled = MAD_scale(resid)
    wt = weight_type.lower()
    if wt == 'tukey':
        return TukeyBiweight(scaled)
    if wt == 'huber':
        return HuberTWeight(scaled)
    if wt == 'ols':
        return OLSWeight(scaled)
    # unknown type: warn and use the default (was a Python 2 print statement)
    print("CalculateWeights: Unknown weight type, used Huber's t weight")
    return HuberTWeight(scaled)

def TukeyBiweight(x):
    """Tukey's biweight: (1 - x^2)^2 for |x| < 1, zero outside."""
    inside = numpy.abs(x) < 1.0
    return numpy.where(inside, (1.0 - x**2)**2, 0.0)

def HuberTWeight(x):
    """
    Huber's t weight with tuning constant 1.345: unit weight inside the
    threshold, 1.345/|x| beyond it.  x must be a 1-D array.
    """
    weights = numpy.ones(len(x))
    outliers = numpy.abs(x) > 1.345
    weights[outliers] = 1.345/numpy.abs(x[outliers])
    return weights

def OLSWeight(x):
    """Ordinary least squares: every point gets unit weight."""
    return numpy.full(len(x), 1.0)

def MAD_scale(resid):
    """
    Scale residuals by their consistency-corrected median absolute
    deviation (MAD/0.6745 estimates sigma for Gaussian residuals).

    If the MAD is zero (more than half the residuals identical) the
    original code divided by zero and produced inf/nan; in that degenerate
    case return zeros so the downstream weight functions assign uniform
    unit weights.
    """
    mad = numpy.median(numpy.abs(resid - numpy.median(resid)))
    scale = mad/0.6745
    if scale == 0.0:
        # no spread to measure: treat every point as an inlier
        return numpy.zeros(len(resid))
    return resid/scale

In [ ]:
# get the netCDF file name
# NOTE(review): hardcoded relative path — assumes the notebook is run from a
# directory where ../Sites/Whroo/... resolves; confirm before re-running
ncname = "../Sites/Whroo/Data/Portal/Whroo_L4.nc"
# read the contents into a data structure
# (qcio is a project module, presumably brought in by "%run basics" above)
ds = qcio.nc_read_series(ncname)
# get the datetime series from the returned data structure
dt = ds.series["DateTime"]["Data"]

In [ ]:
# open an Excel workbook for the results, named after the netCDF file
# (e.g. Whroo_L4_LRC.xls); nothing is written to disk until xlfile.save()
# is called in the final cell
xlname = ncname.replace(".nc","_LRC.xls")
xlfile = qcio.xl_open_write(xlname)

In [ ]:
# rectangular hyperbolic LRC, with D function
# set the downwelling shortwave radiation and friction velocity thresholds
Fsd_threshold = 10      # day/night discriminator; presumably W/m^2 — confirm
ustar_threshold = 0.40  # u* filter for nocturnal Fc; presumably m/s — confirm
D0 = 0.5                # VPD breakpoint for the Lasslop damping function
E0 = 191                # fixed Lloyd-Taylor temperature sensitivity
# add a sheet to the Excel workbook for this model's results
xlsheet = xlfile.add_sheet("RHLRC_D")
# get the index of the first period at the start of the first whole month
si = qcutils.GetDateIndex(dt,str(dt[0]),match="startnextmonth")
# get the start datetime for the monthly loop below
start_date = dt[si]
# create a dictionary for the results; "data" lists are appended to once
# per month and later written to the workbook with the given cell formats
RHLRC_D = {"DateTime":{"data":[],"units":"-","format":"dd/mm/yyyy hh:mm"},
           "alpha":{"data":[],"units":"-","format":"0.0000"},
           "beta":{"data":[],"units":"-","format":"0.00"},
           "k":{"data":[],"units":"-","format":"0.000"},
           "rb":{"data":[],"units":"-","format":"0.00"},
           "ER":{"data":[],"units":"-","format":"0.00"}}
# loop over monthly periods until the end of the data set
while start_date<dt[-1]:
    # get the end datetime for this pass (one calendar month later)
    end_date = start_date+dateutil.relativedelta.relativedelta(months=1)
    # indices of start and end datetimes
    si = qcutils.GetDateIndex(dt,str(start_date),default=0)
    ei = qcutils.GetDateIndex(dt,str(end_date),default=len(dt))
    # minimum number of points is 10% of the period length
    min_points = int(0.1*(ei-si))
    # index of the mid-period record, used to date-stamp the results
    mi = int((si+ei)/2)
    # get the drivers and fluxes for this month as masked arrays
    Fsd,f,a = qcutils.GetSeriesasMA(ds,"Fsd",si=si,ei=ei)
    NEE,f,a = qcutils.GetSeriesasMA(ds,"Fc",si=si,ei=ei)
    ustar,f,a = qcutils.GetSeriesasMA(ds,"ustar",si=si,ei=ei)
    VPD,f,a = qcutils.GetSeriesasMA(ds,"VPD",si=si,ei=ei)
    T,f,a = qcutils.GetSeriesasMA(ds,"Ta",si=si,ei=ei)
    # mask out the night-time data ...
    Fsd_day = numpy.ma.masked_where(Fsd<Fsd_threshold,Fsd)
    VPD_day = numpy.ma.masked_where(Fsd<Fsd_threshold,VPD)
    T_day = numpy.ma.masked_where(Fsd<Fsd_threshold,T)
    NEE_day = numpy.ma.masked_where(Fsd<Fsd_threshold,NEE)
    # accumulate the union of the masks (one masked ==> all masked); the
    # original overwrote the mask on each pass instead of OR-ing them
    mask = numpy.zeros(len(Fsd),dtype=bool)
    for item in [Fsd_day,VPD_day,T_day,NEE_day]:
        mask = numpy.ma.mask_or(mask,numpy.ma.getmaskarray(item),copy=True)
    # remove masked elements; the original rebound the loop variable inside
    # a for loop, which discarded the compressed result, so each array is
    # assigned explicitly here
    Fsd_day = numpy.ma.compressed(numpy.ma.masked_where(mask,Fsd_day))
    VPD_day = numpy.ma.compressed(numpy.ma.masked_where(mask,VPD_day))
    T_day = numpy.ma.compressed(numpy.ma.masked_where(mask,T_day))
    NEE_day = numpy.ma.compressed(numpy.ma.masked_where(mask,NEE_day))
    # mask out the day-time data; these stay full length so the element-wise
    # u* filter below still lines up with ustar
    NEE_night = numpy.ma.masked_where(Fsd>=Fsd_threshold,NEE)
    T_night = numpy.ma.masked_where(Fsd>=Fsd_threshold,T)
    # propagate the nocturnal temperature mask into the nocturnal NEE
    night_mask = numpy.ma.mask_or(numpy.ma.getmaskarray(NEE_night),
                                  numpy.ma.getmaskarray(T_night),
                                  copy=True)
    NEE_night = numpy.ma.masked_where(night_mask,NEE_night)
    # add the mid-period datetime to the results dictionary
    RHLRC_D["DateTime"]["data"].append(dt[mi])
    # fixed priors for [alpha, beta, k, rb]; the original also computed
    # rb_prior/beta_prior but never used them, so they have been removed
    p0 = [0.01,20,0,2]
    # call the optimisation if at least the minimum number of points is
    # available, otherwise set the results to the missing value
    if len(Fsd_day)>=min_points:
        popt,pcov = irls_leastsq(residuals_RHLRC_D,p0,
                                 args=(NEE_day,Fsd_day,VPD_day,T_day,D0,E0),
                                 maxfev=3,weight_type='Huber')
    else:
        popt = [-9999,-9999,-9999,-9999]
    RHLRC_D["alpha"]["data"].append(popt[0])
    RHLRC_D["beta"]["data"].append(popt[1])
    RHLRC_D["k"]["data"].append(popt[2])
    RHLRC_D["rb"]["data"].append(popt[3])
    # plot the observed and fitted day-time NEE against Fsd
    if popt[0]!=-9999:
        NEE_fit = NEE_RHLRC_D(Fsd_day,VPD_day,T_day,popt[0],popt[1],popt[2],D0,popt[3],E0)
        fig=plt.figure()
        plt.plot(Fsd_day,NEE_day,'b.')
        plt.plot(Fsd_day,NEE_fit,'r+')
        plt.show()
    # get the average of the u*-filtered, nocturnal Fc as a direct ER estimate
    NEE_night = numpy.ma.masked_where(ustar<=ustar_threshold,NEE_night)
    # count() ignores masked elements; len() (used originally) always
    # returned the full period length, so the check never failed
    if NEE_night.count()>=min_points:
        RHLRC_D["ER"]["data"].append(numpy.ma.mean(NEE_night))
    else:
        RHLRC_D["ER"]["data"].append(-9999)
    start_date = end_date
# write the results to the Excel workbook
qcio.xl_write_data(xlsheet,RHLRC_D,xlCol=0)

In [ ]:
# write the workbook to disk; results are not persisted until this call
xlfile.save(xlname)