In [1]:
%matplotlib inline
import pandas as pd
import unicodedata
import numpy as np
import os
import glob
import re
import xmltodict
import matplotlib.pyplot as plt
import matplotlib.dates as dates
import matplotlib.ticker as tick
from matplotlib.backends.backend_pdf import PdfPages
import snake

from datetime import datetime
from pylab import rcParams
rcParams['figure.figsize'] = 15, 10

In [2]:
# Drive letter hosting the groundwater project network share.
Drive = "U"

Use g[wellinfo[wellinfo['Well']==wellname]['closest_baro']] instead if you want to match the closest barometer to the data

Well Information


In [3]:
# Root data folder for the Courthouse Wash transducer project.
folder = Drive + ':\\GWP\\Groundwater\\Courthouse_Wash_Transducer_Data'
# The well metadata CSV lives inside that folder; derive its path from
# `folder` so the location is written in exactly one place.
wellinfofile = folder + '\\wellinfo.csv'

In [4]:
# Load the well-information table; the first column (WellID) is the index.
wellinfo = pd.read_csv(wellinfofile, header=0, index_col=0)
# Convert ground elevation from feet to meters (1 m = 3.2808 ft).
wellinfo['G_Elev_m'] = wellinfo['GroundElevation'] / 3.2808
# Normalize well names to lower-case, whitespace-stripped strings for joins.
wellinfo['Well'] = wellinfo['Well'].apply(lambda w: str(w).lower().strip())
# Mirror the index into an explicit WellID column for label-free lookups.
wellinfo['WellID'] = wellinfo.index.values

In [5]:
# Display the assembled well-information table for a visual sanity check.
wellinfo


Out[5]:
SiteID Well WRNum WIN USGSNumber Offset GroundElevation ScreenInterval Latitude Longitude ... HasTemp BaroEfficiency BaroEfficiencyStart GraphMax GraphMin ImagePath LinkUSGSNWIS VegType G_Elev_m WellID
WellID
138 101 chwent 1101002M00 435519 3.841490e+14 1.34 4349.55 670-690 38.696903 -109.669292 ... 1 NaN NaN NaN NaN courthouse wash.jpg http://nwis.waterdata.usgs.gov/usa/nwis/gwleve... NaN 1325.758961 138
139 101 chwnav 1101002M00 435519 3.841490e+14 1.74 4349.55 860-880 38.696903 -109.669292 ... 1 NaN NaN NaN NaN courthouse wash.jpg http://nwis.waterdata.usgs.gov/usa/nwis/gwleve... NaN 1325.758961 139

2 rows × 44 columns

Manual Water Levels


In [6]:
manualwls = Drive + ':\\GWP\\Groundwater\\Courthouse_Wash_Transducer_Data\\manual_measurements.csv'
# Read the manual water-level measurements, parsing the DateTime index
# column into timestamps.  Fixed: `parse_dates=1` passed an ambiguous int
# where a bool (or column list) is expected; `skiprows=0` was a no-op.
manual = pd.read_csv(manualwls, parse_dates=True, index_col="DateTime", engine="python")

Barometric Pressure Data

Compilation of Barometric Pressure Data


In [7]:
# Glob pattern matching every barometer ("baro") logger file in the folder;
# snake.compilation stitches the matching files into one DataFrame.
barofile = '{0}\\*baro*'.format(folder)
barodata = snake.compilation(barofile)

In [8]:
# Show the most recent barometric readings.  The original cell also had a
# bare `barodata['Level']` expression whose result was discarded; that dead
# statement has been removed.
barodata.tail(15)


Out[8]:
Temperature Level name
DateTime
2015-06-11 20:53:09 17.797 28.912167 chwbaro 2015_06_12
2015-06-11 21:53:09 17.800 28.910326 chwbaro 2015_06_12
2015-06-11 22:53:09 17.803 28.914073 chwbaro 2015_06_12
2015-06-11 23:53:09 17.805 28.904576 chwbaro 2015_06_12
2015-06-12 00:53:09 17.809 28.909122 chwbaro 2015_06_12
2015-06-12 01:53:09 17.812 28.902665 chwbaro 2015_06_12
2015-06-12 02:53:09 17.816 28.904308 chwbaro 2015_06_12
2015-06-12 03:53:09 17.819 28.906917 chwbaro 2015_06_12
2015-06-12 04:53:09 17.823 28.919694 chwbaro 2015_06_12
2015-06-12 05:53:09 17.826 28.931437 chwbaro 2015_06_12
2015-06-12 06:53:09 17.832 28.937459 chwbaro 2015_06_12
2015-06-12 07:53:09 17.839 28.945087 chwbaro 2015_06_12
2015-06-12 08:53:09 17.845 28.943012 chwbaro 2015_06_12
2015-06-12 09:53:09 17.850 28.935953 chwbaro 2015_06_12
2015-06-12 10:53:09 17.853 28.927690 chwbaro 2015_06_12

In [9]:
# Resample the compiled barometric record onto a regular hourly grid.
barodata = snake.hourly_resample(barodata)

In [10]:
infile = Drive + ":\\GWP\\Groundwater\\Courthouse_Wash_Transducer_Data\\"
# Split the folder path into components; `pathlist` is reused by later cells
# to build output paths.
pathlist = os.path.splitext(infile)[0].split('\\')
# Re-join the first five components in one step instead of concatenating
# them piecewise (same resulting path as the original).
barodata.to_csv('\\'.join(pathlist[:5]) + '\\baro.csv')

In [11]:
# Quick visual check of the hourly barometric pressure record.
barodata[['Level']].plot()


Out[11]:
<matplotlib.axes._subplots.AxesSubplot at 0x86c3e70>

In [12]:
# NOTE(review): this re-assigns `folder` to the same path already set above.
folder = Drive + ":\\GWP\\Groundwater\\Courthouse_Wash_Transducer_Data"
# Scan the folder for transducer files and append per-file metadata (date,
# extension, full file name, logger type) to the well-information table.
wellinfo = snake.make_files_table(folder, wellinfo)
wellinfo.tail(n=3)


Out[12]:
date extensions full_file_name siteid filedups LoggerTypeID_x LoggerTypeName SiteID Well WRNum ... HasTemp BaroEfficiency BaroEfficiencyStart GraphMax GraphMin ImagePath LinkUSGSNWIS VegType G_Elev_m WellID
0 2015_06_12 .xle chwent 2015_06_12.xle chwent True 2 Solinst 101 chwent 1101002M00 ... 1 NaN NaN NaN NaN courthouse wash.jpg http://nwis.waterdata.usgs.gov/usa/nwis/gwleve... NaN 1325.758961 138
1 2015_06_12 .xle chwnav 2015_06_12.xle chwnav True 2 Solinst 101 chwnav 1101002M00 ... 1 NaN NaN NaN NaN courthouse wash.jpg http://nwis.waterdata.usgs.gov/usa/nwis/gwleve... NaN 1325.758961 139

2 rows × 51 columns

Water Level Transducer Data

Export and Plot Data

Export Manual Data


In [13]:
# Re-key the manual measurements by WellID so rows can be matched against
# the well-information table; DateTime moves back to a regular column.
manual.reset_index(inplace=True)
manual.set_index('WellID',inplace=True)

# Add the columns expected by the transducer output schema; fields that only
# exist for logger data are left as NaN placeholders.
manual["MeasuredLevel"] = np.nan
manual["Temp"] = np.nan
manual["BaroEfficiencyCorrected"] = np.nan 
manual["DeltaLevel"] = np.nan
manual["DriftCorrection"] = np.nan
manual["MeasuredBy"] = np.nan
manual["Tape"] = 1
manual["DTWBelowGroundSurface"] = np.nan
manual["WaterElevation"] = np.nan
#manualrecent["DTWBelowGroundSurface"] = np.nan
# WellID of every manual measurement.  NOTE(review): the list contains
# duplicates (one entry per measurement, e.g. 139 repeated), so each .loc
# assignment below writes all rows of that well at once and repeats once per
# duplicate -- presumably intended, but verify.
manlist= [int(i) for i in manual.index.tolist()]

print manlist
for i in manlist:
    try:
        # Raw measured depth-to-water is referenced to the casing; subtract
        # the casing offset for depth below ground, then subtract that from
        # ground elevation to get water elevation.
        manual.loc[i,"DTWBelowCasing"] = manual.loc[i,"MeasuredDTW"]
        manual.loc[i,"DTWBelowGroundSurface"] = manual.loc[i,"MeasuredDTW"] - wellinfo.loc[i,"Offset"]
        manual.loc[i,"WaterElevation"] = wellinfo.loc[i,'GroundElevation'] - manual.loc[i,"DTWBelowGroundSurface"]
    except(KeyError):
        # Wells present in the manual file but absent from wellinfo are skipped.
        pass

# Write the manual measurements out using the shared column order.
outpath = pathlist[0] + '\\' + pathlist[1] + '\\' + pathlist[2] + '\\' + pathlist[3] + '\\' + pathlist[4] + '\\' + 'Manual' + '.csv'  
manual.to_csv(outpath, index=True, columns= ["DateTime","MeasuredLevel","Temp","BaroEfficiencyCorrected","DeltaLevel",
                                             "MeasuredDTW","DriftCorrection","DTWBelowCasing","DTWBelowGroundSurface",
                                             "WaterElevation","Tape","MeasuredBy"])


[139, 139, 139, 139, 139, 139, 139, 139, 139]

Import All Files


In [14]:
# Build a multi-page PDF with one water-level / barometric-pressure figure
# per well file listed in the wellinfo table.
# Fixed: the original wrote folder+'wells.pdf' (missing path separator), so
# the PDF landed beside the data folder instead of inside it.
pdf_pages = PdfPages(folder + '\\wells.pdf')
for i in wellinfo.loc[:, 'full_file_name']:
    print(i)
    g = snake.imp_new_well(folder + '\\' + i, wellinfo, manual, barodata)
    # Locate the barometric-pressure column (its name contains 'pw').
    h = None
    for col in g.columns.tolist():
        if 'pw' in col:
            h = col
    y1 = g['WaterElevation'].values
    y2 = g[h].values
    x1 = g['DateTime'].values
    # Fixed: the original referenced the undefined name `wellinfo1`.
    wellname, wellid = snake.getwellid(folder + '\\' + i, wellinfo)
    # Manual measurements closest in time to the ends of the logger record.
    # NOTE(review): `fcl` was called unqualified (undefined in this
    # notebook); it is assumed to be the find-closest helper in the snake
    # module -- confirm.
    welltop = (wellinfo[wellinfo['WellID'] == wellid]['GroundElevation'].values[0] +
               wellinfo[wellinfo['WellID'] == wellid]['Offset'].values[0])
    manwell = manual[manual['WellID'] == wellid]
    ylast = welltop - snake.fcl(manwell, max(g.index.to_datetime()))[1]
    yfirst = welltop - snake.fcl(manwell, min(g.index.to_datetime()))[1]
    xlast = (snake.fcl(manwell, max(g.index.to_datetime()))).name.to_datetime()
    xfirst = (snake.fcl(manwell, min(g.index.to_datetime()))).name.to_datetime()
    x4 = [xfirst, xlast]
    y4 = [yfirst, ylast]
    fig, ax1 = plt.subplots()
    ax1.scatter(x4, y4, color='purple')
    ax1.plot(x1, y1, color='blue', label='Water Level Elevation')
    ax1.set_ylabel('Water Level Elevation', color='blue')
    y_formatter = tick.ScalarFormatter(useOffset=False)
    ax1.yaxis.set_major_formatter(y_formatter)
    ax2 = ax1.twinx()
    ax2.set_ylabel('Barometric Pressure (ft)', color='red')
    ax2.plot(x1, y2, color='red', label='Barometric pressure (ft)')
    h1, l1 = ax1.get_legend_handles_labels()
    h2, l2 = ax2.get_legend_handles_labels()
    ax1.legend(h1 + h2, l1 + l2, loc=3)

    plt.title('Well: ' + wellname.title() + '  ' + 'Total Drift = ' + str(g['DriftCorrection'][-1]))
    pdf_pages.savefig(fig)
    # Fixed: matplotlib Figure objects have no .close() method; close via
    # pyplot so memory is released between iterations.
    plt.close(fig)
pdf_pages.close()


chwent 2015_06_12.xle
U:\GWP\Groundwater\Courthouse_Wash_Transducer_Data\chwent 2015_06_12.xle
---------------------------------------------------------------------------
IndexError                                Traceback (most recent call last)
<ipython-input-14-e69a52f00996> in <module>()
      2 for i in wellinfo.loc[:,'full_file_name']:
      3     print i
----> 4     g = snake.imp_new_well(folder+'\\'+i, wellinfo, manual, barodata)
      5     glist = g.columns.tolist()
      6     for j in range(len(glist)):

C:\Users\PAULINKENBRANDT\Documents\GitHub\Snake_Valley\snake.py in imp_new_well(infile, wellinfo, manual, baro)
    343         b = hourly_resample(baro[bp], bse)
    344         f = hourly_resample(f,bse)
--> 345         g = pd.merge(f,b,left_index=True,right_index=True,how='inner')
    346 
    347         g['MeasuredLevel'] = g['Level']

C:\Python27\ArcGIS10.3\Lib\site-packages\pandas\tools\merge.pyc in merge(left, right, how, on, left_on, right_on, left_index, right_index, sort, suffixes, copy)
     36                          right_index=right_index, sort=sort, suffixes=suffixes,
     37                          copy=copy)
---> 38     return op.get_result()
     39 if __debug__:
     40     merge.__doc__ = _merge_doc % '\nleft : DataFrame'

C:\Python27\ArcGIS10.3\Lib\site-packages\pandas\tools\merge.pyc in get_result(self)
    184 
    185     def get_result(self):
--> 186         join_index, left_indexer, right_indexer = self._get_join_info()
    187 
    188         ldata, rdata = self.left._data, self.right._data

C:\Python27\ArcGIS10.3\Lib\site-packages\pandas\tools\merge.pyc in _get_join_info(self)
    254     def _get_join_info(self):
    255         left_ax = self.left._data.axes[self.axis]
--> 256         right_ax = self.right._data.axes[self.axis]
    257         if self.left_index and self.right_index:
    258             join_index, left_indexer, right_indexer = \

IndexError: list index out of range

In [22]:
infile = Drive + ':\\GWP\\Groundwater\\Courthouse_Wash_Transducer_Data\\chwnav 2015_06_12.xle'
with open(infile) as fd:
    # Parse the Solinst .xle file (XML) into nested dicts.
    obj = xmltodict.parse(fd.read(), encoding="ISO-8859-1")
# Navigate through the xml to the logged data.
wellrawdata = obj['Body_xle']['Data']['Log']
# Convert xml data to a pandas dataframe.
f = pd.DataFrame(wellrawdata)

# Channel 2 is temperature; name the column from the file's own header.
ch2name = str(obj['Body_xle']['Ch2_data_header']['Identification']).title()
f[ch2name] = f['ch2']

tempunit = obj['Body_xle']['Ch2_data_header']['Unit']
if tempunit == 'Deg C' or tempunit == u'\N{DEGREE SIGN}' + u'C':
    f[ch2name] = f['ch2'].convert_objects(convert_numeric=True)
elif tempunit == 'Deg F' or tempunit == u'\N{DEGREE SIGN}' + u'F':
    # Fixed: the original multiplied by 0.33456 (the kPa->feet pressure
    # factor, evidently a copy-paste error); Fahrenheit converts to Celsius
    # as (F - 32) * 5/9.
    f[ch2name] = (f['ch2'].convert_objects(convert_numeric=True) - 32.0) * 5.0 / 9.0

# Channel 1 is the pressure/level reading; convert to feet of water.
ch1name = str(obj['Body_xle']['Ch1_data_header']['Identification']).title()
unit = str(obj['Body_xle']['Ch1_data_header']['Unit']).lower()
ch1raw = f['ch1'].convert_objects(convert_numeric=True)
if unit == "feet" or unit == "ft":
    f[ch1name] = ch1raw
elif unit == "kpa":
    f[ch1name] = ch1raw * 0.33456          # kPa -> ft of water
elif unit == "mbar":
    f[ch1name] = ch1raw * 0.0334552565551  # mbar -> ft of water
elif unit == "psi":
    f[ch1name] = ch1raw * 2.306726         # psi -> ft of water
elif unit == "m" or unit == "meters":
    f[ch1name] = ch1raw * 3.28084          # m -> ft
else:
    f[ch1name] = ch1raw
    print("Unknown Units")

# Add extension-free file name to the dataframe.
f['name'] = snake.getfilename(infile)
# Combine Date and Time fields into one DateTime field.
f['DateTime'] = pd.to_datetime(f.apply(lambda x: x['Date'] + ' ' + x['Time'], 1))
f[ch1name] = f[ch1name].convert_objects(convert_numeric=True)
f[ch2name] = f[ch2name].convert_objects(convert_numeric=True)
f = f.reset_index()
f = f.set_index('DateTime')
f = f.drop(['Date', 'Time', '@id', 'ch1', 'ch2', 'index', 'ms'], axis=1)

In [23]:
# Display the parsed transducer dataframe for inspection.
f


Out[23]:
Temperature Level name
DateTime
2013-11-13 11:49:21 13.419 29.2942 chwnav 2015_06_12
2013-11-13 12:49:21 15.320 43.4643 chwnav 2015_06_12
2013-11-13 13:49:21 15.317 43.4622 chwnav 2015_06_12
2013-11-13 14:49:21 15.318 43.4616 chwnav 2015_06_12
2013-11-13 15:49:21 15.317 43.4600 chwnav 2015_06_12
2013-11-13 16:49:21 15.317 43.4430 chwnav 2015_06_12
2013-11-13 17:49:21 15.318 43.4271 chwnav 2015_06_12
2013-11-13 18:49:21 15.317 43.4072 chwnav 2015_06_12
2013-11-13 19:49:21 15.318 43.3853 chwnav 2015_06_12
2013-11-13 20:49:21 15.318 43.3687 chwnav 2015_06_12
2013-11-13 21:49:21 15.317 43.3586 chwnav 2015_06_12
2013-11-13 22:49:21 15.317 43.3546 chwnav 2015_06_12
2013-11-13 23:49:21 15.317 43.3611 chwnav 2015_06_12
2013-11-14 00:49:21 15.317 43.3823 chwnav 2015_06_12
2013-11-14 01:49:21 15.318 43.4003 chwnav 2015_06_12
2013-11-14 02:49:21 15.317 43.4259 chwnav 2015_06_12
2013-11-14 03:49:21 15.317 43.4401 chwnav 2015_06_12
2013-11-14 04:49:21 15.317 43.4458 chwnav 2015_06_12
2013-11-14 05:49:21 15.317 43.4455 chwnav 2015_06_12
2013-11-14 06:49:21 15.317 43.4324 chwnav 2015_06_12
2013-11-14 07:49:21 15.317 43.4211 chwnav 2015_06_12
2013-11-14 08:49:21 15.318 43.4098 chwnav 2015_06_12
2013-11-14 09:49:21 15.317 43.4020 chwnav 2015_06_12
2013-11-14 10:49:21 15.317 43.3946 chwnav 2015_06_12
2013-11-14 11:49:21 15.317 43.3893 chwnav 2015_06_12
2013-11-14 12:49:21 15.317 43.3765 chwnav 2015_06_12
2013-11-14 13:49:21 15.317 43.3749 chwnav 2015_06_12
2013-11-14 14:49:21 15.318 43.3705 chwnav 2015_06_12
2013-11-14 15:49:21 15.317 43.3618 chwnav 2015_06_12
2013-11-14 16:49:21 15.317 43.3498 chwnav 2015_06_12
... ... ... ...
2015-06-11 05:49:21 15.346 46.2754 chwnav 2015_06_12
2015-06-11 06:49:21 15.347 46.2619 chwnav 2015_06_12
2015-06-11 07:49:21 15.346 46.2523 chwnav 2015_06_12
2015-06-11 08:49:21 15.347 46.2505 chwnav 2015_06_12
2015-06-11 09:49:21 15.347 46.2542 chwnav 2015_06_12
2015-06-11 10:49:21 15.347 46.2761 chwnav 2015_06_12
2015-06-11 11:49:21 15.347 46.3070 chwnav 2015_06_12
2015-06-11 12:49:21 15.346 46.3180 chwnav 2015_06_12
2015-06-11 13:49:21 15.347 46.3453 chwnav 2015_06_12
2015-06-11 14:49:21 15.347 46.3334 chwnav 2015_06_12
2015-06-11 15:49:21 15.346 46.3306 chwnav 2015_06_12
2015-06-11 16:49:21 15.347 46.3204 chwnav 2015_06_12
2015-06-11 17:49:21 15.347 46.3142 chwnav 2015_06_12
2015-06-11 18:49:21 15.346 46.3059 chwnav 2015_06_12
2015-06-11 19:49:21 15.346 46.3006 chwnav 2015_06_12
2015-06-11 20:49:21 15.347 46.3001 chwnav 2015_06_12
2015-06-11 21:49:21 15.347 46.3009 chwnav 2015_06_12
2015-06-11 22:49:21 15.346 46.3076 chwnav 2015_06_12
2015-06-11 23:49:21 15.347 46.3142 chwnav 2015_06_12
2015-06-12 00:49:21 15.347 46.3281 chwnav 2015_06_12
2015-06-12 01:49:21 15.346 46.3324 chwnav 2015_06_12
2015-06-12 02:49:21 15.346 46.3353 chwnav 2015_06_12
2015-06-12 03:49:21 15.346 46.3310 chwnav 2015_06_12
2015-06-12 04:49:21 15.346 46.3242 chwnav 2015_06_12
2015-06-12 05:49:21 15.347 46.3086 chwnav 2015_06_12
2015-06-12 06:49:21 15.346 46.2906 chwnav 2015_06_12
2015-06-12 07:49:21 15.346 46.2763 chwnav 2015_06_12
2015-06-12 08:49:21 15.347 46.2625 chwnav 2015_06_12
2015-06-12 09:49:21 15.347 46.2568 chwnav 2015_06_12
2015-06-12 10:49:21 15.346 46.2598 chwnav 2015_06_12

13824 rows × 3 columns

Import One File


In [42]:
import snake
inputfile = r"U:\GWP\Groundwater\Courthouse_Wash_Transducer_Data\chwnav 2015_06_12.xle"

# Import one well's record with barometric correction applied.
g = snake.imp_new_well(inputfile, wellinfo, manual, barodata)
# Locate the barometric-pressure column (its name contains 'pw').
h = None
for col in g.columns.tolist():
    if 'pw' in col:
        h = col
y1 = g['WaterElevation'].values
y2 = g[h].values
x1 = g['DateTime'].values

# Fixed: getwellid/fcl/getfilename were called unqualified (NameError) and
# getwellid was missing its wellinfo argument; all are snake-module
# functions.  NOTE(review): `fcl` assumed to be snake.fcl -- confirm.
wellname, wellid = snake.getwellid(inputfile, wellinfo)
welltop = (wellinfo[wellinfo['WellID'] == wellid]['GroundElevation'].values[0] +
           wellinfo[wellinfo['WellID'] == wellid]['Offset'].values[0])
manwell = manual[manual['WellID'] == wellid]
ylast = welltop - snake.fcl(manwell, max(g.index.to_datetime()))[1]
yfirst = welltop - snake.fcl(manwell, min(g.index.to_datetime()))[1]
xlast = (snake.fcl(manwell, max(g.index.to_datetime()))).name.to_datetime()
xfirst = (snake.fcl(manwell, min(g.index.to_datetime()))).name.to_datetime()
x4 = [xfirst, xlast]
y4 = [yfirst, ylast]

fig, ax1 = plt.subplots()
ax1.scatter(x4, y4, color='purple')
ax1.plot(x1, y1, color='red')
y_formatter = tick.ScalarFormatter(useOffset=False)
ax1.yaxis.set_major_formatter(y_formatter)
ax2 = ax1.twinx()
ax2.plot(x1, y2, color='blue')
plt.title(snake.getfilename(inputfile) + '  ' + str(g['DriftCorrection'][-1]))
plt.show()


U:\GWP\Groundwater\Courthouse_Wash_Transducer_Data\chwnav 2015_06_12.xle
---------------------------------------------------------------------------
IndexError                                Traceback (most recent call last)
<ipython-input-42-acd02ecf899b> in <module>()
      2 inputfile = r"U:\GWP\Groundwater\Courthouse_Wash_Transducer_Data\chwnav 2015_06_12.xle"
      3 
----> 4 g = snake.imp_new_well(inputfile, wellinfo, manual, barodata)
      5 glist = g.columns.tolist()
      6 for j in range(len(glist)):

C:\Users\PAULINKENBRANDT\Documents\GitHub\Snake_Valley\snake.py in imp_new_well(infile, wellinfo, manual, baro)
    343             b = hourly_resample(baro['Level'], bse)
    344             b = b.to_frame()
--> 345             b['bp'] = b['Level']
    346             bp = 'bp'
    347 

C:\Python27\ArcGIS10.3\Lib\site-packages\pandas\tools\merge.pyc in merge(left, right, how, on, left_on, right_on, left_index, right_index, sort, suffixes, copy)
     36                          right_index=right_index, sort=sort, suffixes=suffixes,
     37                          copy=copy)
---> 38     return op.get_result()
     39 if __debug__:
     40     merge.__doc__ = _merge_doc % '\nleft : DataFrame'

C:\Python27\ArcGIS10.3\Lib\site-packages\pandas\tools\merge.pyc in get_result(self)
    184 
    185     def get_result(self):
--> 186         join_index, left_indexer, right_indexer = self._get_join_info()
    187 
    188         ldata, rdata = self.left._data, self.right._data

C:\Python27\ArcGIS10.3\Lib\site-packages\pandas\tools\merge.pyc in _get_join_info(self)
    254     def _get_join_info(self):
    255         left_ax = self.left._data.axes[self.axis]
--> 256         right_ax = self.right._data.axes[self.axis]
    257         if self.left_index and self.right_index:
    258             join_index, left_indexer, right_indexer = \

IndexError: list index out of range

In [50]:
# --- Step-by-step replication of snake.imp_new_well for debugging ---
wellname, wellid = snake.getwellid(infile, wellinfo)  # see custom getwellid function
f = snake.new_xle_imp(infile)
# Minute offset of the logger's sampling times; aligns the hourly resample.
bse = int(f.index.to_datetime().minute[0])
try:
    # Use the well-specific barometer column when one is configured.
    # Fixed: take the scalar column name; the original kept a one-row
    # Series, which broke both the indexing here and g[bp] below.
    bp = wellinfo[wellinfo['Well'] == wellname]['BE barologger'].values[0]
    b = snake.hourly_resample(barodata[bp], bse)
    b = b.to_frame()
except (KeyError, NameError, IndexError):
    # Fall back to the single compiled barometer record.
    b = snake.hourly_resample(barodata['Level'], bse)
    b = b.to_frame()
    b['bp'] = b['Level']
    b.drop(['Level'], inplace=True, axis=1)
    # Fixed: bp must name the barometric column of `b` ('bp'), not 'Level';
    # the original left bp = u'Level', so the efficiency correction at the
    # bottom used the well's own level instead of barometric pressure
    # (compare snake.py, which sets bp = 'bp' here).
    bp = 'bp'
f = snake.hourly_resample(f, bse)
print(f)
g = pd.merge(f, b, left_index=True, right_index=True, how='inner', sort=True)
print(g)
g['MeasuredLevel'] = g['Level']

# Remove first and/or last measurements if the transducer was out of the water.
## Examine first value: compare the first difference against 2.2 std dev
## of the next 30 hourly differences.
firstupper = np.mean(g['MeasuredLevel'].diff()[2:31]) + np.std(g['MeasuredLevel'].diff()[2:31]) * 2.2
firstlower = np.mean(g['MeasuredLevel'].diff()[2:31]) - np.std(g['MeasuredLevel'].diff()[2:31]) * 2.2
firstlev = g['MeasuredLevel'].diff()[1:2].values[0]  # difference of first two values
## Examine last value the same way, using the preceding 30 differences.
lastupper = np.mean(g['MeasuredLevel'].diff()[-31:-2]) + np.std(g['MeasuredLevel'].diff()[-31:-2]) * 2.2
lastlower = np.mean(g['MeasuredLevel'].diff()[-31:-2]) - np.std(g['MeasuredLevel'].diff()[-31:-2]) * 2.2
lastlev = g['MeasuredLevel'].diff()[-2:-1].values[0]  # difference of last two values
## Drop first value if it jumps more than 0.1 ft and lies beyond 2.2 std dev.
if np.abs(firstlev) > 0.1:
    if firstlev > firstupper or firstlev < firstlower:
        g.drop(g.index[0], inplace=True)
## Drop last value by the same criterion.
if np.abs(lastlev) > 0.1:
    if lastlev > lastupper or lastlev < lastlower:
        g.drop(g.index[-1], inplace=True)

# Normalize the temperature column name.
glist = f.columns.tolist()
if 'Temperature' in glist:
    g['Temp'] = g['Temperature']
    g.drop(['Temperature'], inplace=True, axis=1)
elif 'Temp' in glist:
    pass
# Barometric efficiency for this well (scalar).
be = wellinfo[wellinfo['WellID'] == wellid]['BaroEfficiency']
be = be.iloc[0]

# Barometric efficiency correction using the barometric column named by bp.
g['BaroEfficiencyCorrected'] = g['MeasuredLevel'] - g[bp] + be * g[bp]


                     Temperature      Level               name
DateTime                                                      
2014-05-09 11:57:00       27.654  28.852363  chwent 2015_06_12
2014-05-09 12:57:00       15.225  52.778217  chwent 2015_06_12
2014-05-09 13:57:00       17.255  53.972115  chwent 2015_06_12
2014-05-09 14:57:00       19.618  54.789700  chwent 2015_06_12
2014-05-09 15:57:00       21.672  55.193243  chwent 2015_06_12
2014-05-09 16:57:00       22.480  55.470802  chwent 2015_06_12
2014-05-09 17:57:00       22.616  55.720146  chwent 2015_06_12
2014-05-09 18:57:00       21.660  55.898952  chwent 2015_06_12
2014-05-09 19:57:00       20.740  56.026249  chwent 2015_06_12
2014-05-09 20:57:00       19.754  56.158466  chwent 2015_06_12
2014-05-09 21:57:00       18.630  56.229989  chwent 2015_06_12
2014-05-09 22:57:00       17.354  56.331695  chwent 2015_06_12
2014-05-09 23:57:00       16.337  56.441603  chwent 2015_06_12
2014-05-10 00:57:00       15.613  56.529857  chwent 2015_06_12
2014-05-10 01:57:00       14.999  56.616144  chwent 2015_06_12
2014-05-10 02:57:00       14.331  56.693899  chwent 2015_06_12
2014-05-10 03:57:00       13.853  56.773296  chwent 2015_06_12
2014-05-10 04:57:00       13.523  56.792653  chwent 2015_06_12
2014-05-10 05:57:00       13.181  56.776577  chwent 2015_06_12
2014-05-10 06:57:00       12.822  56.776249  chwent 2015_06_12
2014-05-10 07:57:00       12.889  56.807745  chwent 2015_06_12
2014-05-10 08:57:00       13.294  56.879595  chwent 2015_06_12
2014-05-10 09:57:00       13.931  56.931104  chwent 2015_06_12
2014-05-10 10:57:00       14.973  56.948821  chwent 2015_06_12
2014-05-10 11:57:00       15.990  56.994424  chwent 2015_06_12
2014-05-10 12:57:00       16.994  57.022312  chwent 2015_06_12
2014-05-10 13:57:00       18.253  57.089241  chwent 2015_06_12
2014-05-10 14:57:00       20.070  57.162731  chwent 2015_06_12
2014-05-10 15:57:00       21.631  57.189962  chwent 2015_06_12
2014-05-10 16:57:00       22.369  57.222443  chwent 2015_06_12
...                          ...        ...                ...
2015-06-11 06:57:00       17.823  43.588584  chwent 2015_06_12
2015-06-11 07:57:00       17.912  43.635828  chwent 2015_06_12
2015-06-11 08:57:00       18.190  43.782482  chwent 2015_06_12
2015-06-11 09:57:00       18.628  44.062994  chwent 2015_06_12
2015-06-11 10:57:00       19.336  44.536091  chwent 2015_06_12
2015-06-11 11:57:00       19.065  44.768374  chwent 2015_06_12
2015-06-11 12:57:00       18.419  44.354660  chwent 2015_06_12
2015-06-11 13:57:00       18.310  44.172902  chwent 2015_06_12
2015-06-11 14:57:00       19.185  44.617455  chwent 2015_06_12
2015-06-11 15:57:00       20.643  45.674214  chwent 2015_06_12
2015-06-11 16:57:00       21.267  46.357941  chwent 2015_06_12
2015-06-11 17:57:00       21.725  46.950133  chwent 2015_06_12
2015-06-11 18:57:00       22.265  47.496065  chwent 2015_06_12
2015-06-11 19:57:00       21.705  47.564962  chwent 2015_06_12
2015-06-11 20:57:00       20.354  46.787731  chwent 2015_06_12
2015-06-11 21:57:00       19.340  45.812337  chwent 2015_06_12
2015-06-11 22:57:00       18.791  45.089896  chwent 2015_06_12
2015-06-11 23:57:00       18.501  44.650920  chwent 2015_06_12
2015-06-12 00:57:00       18.286  44.343177  chwent 2015_06_12
2015-06-12 01:57:00       18.092  44.142718  chwent 2015_06_12
2015-06-12 02:57:00       17.923  43.968833  chwent 2015_06_12
2015-06-12 03:57:00       17.697  43.796261  chwent 2015_06_12
2015-06-12 04:57:00       17.528  43.683400  chwent 2015_06_12
2015-06-12 05:57:00       17.299  43.586287  chwent 2015_06_12
2015-06-12 06:57:00       17.394  43.666668  chwent 2015_06_12
2015-06-12 07:57:00       17.986  44.006563  chwent 2015_06_12
2015-06-12 08:57:00       19.208  44.885172  chwent 2015_06_12
2015-06-12 09:57:00       20.450  46.197508  chwent 2015_06_12
2015-06-12 10:57:00       21.464  47.162403  chwent 2015_06_12
2015-06-12 11:57:00       22.213  47.814306  chwent 2015_06_12

[9577 rows x 3 columns]
                     Temperature      Level               name         bp
DateTime                                                                 
2014-05-09 11:57:00       27.654  28.852363  chwent 2015_06_12  28.845832
2014-05-09 12:57:00       15.225  52.778217  chwent 2015_06_12  28.817109
2014-05-09 13:57:00       17.255  53.972115  chwent 2015_06_12  28.784355
2014-05-09 14:57:00       19.618  54.789700  chwent 2015_06_12  28.753417
2014-05-09 15:57:00       21.672  55.193243  chwent 2015_06_12  28.739879
2014-05-09 16:57:00       22.480  55.470802  chwent 2015_06_12  28.731964
2014-05-09 17:57:00       22.616  55.720146  chwent 2015_06_12  28.727523
2014-05-09 18:57:00       21.660  55.898952  chwent 2015_06_12  28.735174
2014-05-09 19:57:00       20.740  56.026249  chwent 2015_06_12  28.756469
2014-05-09 20:57:00       19.754  56.158466  chwent 2015_06_12  28.775509
2014-05-09 21:57:00       18.630  56.229989  chwent 2015_06_12  28.792904
2014-05-09 22:57:00       17.354  56.331695  chwent 2015_06_12  28.810878
2014-05-09 23:57:00       16.337  56.441603  chwent 2015_06_12  28.803196
2014-05-10 00:57:00       15.613  56.529857  chwent 2015_06_12  28.803358
2014-05-10 01:57:00       14.999  56.616144  chwent 2015_06_12  28.794486
2014-05-10 02:57:00       14.331  56.693899  chwent 2015_06_12  28.804569
2014-05-10 03:57:00       13.853  56.773296  chwent 2015_06_12  28.796497
2014-05-10 04:57:00       13.523  56.792653  chwent 2015_06_12  28.793657
2014-05-10 05:57:00       13.181  56.776577  chwent 2015_06_12  28.792281
2014-05-10 06:57:00       12.822  56.776249  chwent 2015_06_12  28.792991
2014-05-10 07:57:00       12.889  56.807745  chwent 2015_06_12  28.784037
2014-05-10 08:57:00       13.294  56.879595  chwent 2015_06_12  28.776572
2014-05-10 09:57:00       13.931  56.931104  chwent 2015_06_12  28.780894
2014-05-10 10:57:00       14.973  56.948821  chwent 2015_06_12  28.759313
2014-05-10 11:57:00       15.990  56.994424  chwent 2015_06_12  28.744790
2014-05-10 12:57:00       16.994  57.022312  chwent 2015_06_12  28.698340
2014-05-10 13:57:00       18.253  57.089241  chwent 2015_06_12  28.665637
2014-05-10 14:57:00       20.070  57.162731  chwent 2015_06_12  28.623494
2014-05-10 15:57:00       21.631  57.189962  chwent 2015_06_12  28.580356
2014-05-10 16:57:00       22.369  57.222443  chwent 2015_06_12  28.552233
...                          ...        ...                ...        ...
2015-06-11 04:57:00       17.986  43.680448  chwent 2015_06_12  28.848989
2015-06-11 05:57:00       17.891  43.604988  chwent 2015_06_12  28.856979
2015-06-11 06:57:00       17.823  43.588584  chwent 2015_06_12  28.865496
2015-06-11 07:57:00       17.912  43.635828  chwent 2015_06_12  28.869874
2015-06-11 08:57:00       18.190  43.782482  chwent 2015_06_12  28.873649
2015-06-11 09:57:00       18.628  44.062994  chwent 2015_06_12  28.879447
2015-06-11 10:57:00       19.336  44.536091  chwent 2015_06_12  28.899639
2015-06-11 11:57:00       19.065  44.768374  chwent 2015_06_12  28.913004
2015-06-11 12:57:00       18.419  44.354660  chwent 2015_06_12  28.908555
2015-06-11 13:57:00       18.310  44.172902  chwent 2015_06_12  28.891094
2015-06-11 14:57:00       19.185  44.617455  chwent 2015_06_12  28.878863
2015-06-11 15:57:00       20.643  45.674214  chwent 2015_06_12  28.880983
2015-06-11 16:57:00       21.267  46.357941  chwent 2015_06_12  28.875929
2015-06-11 17:57:00       21.725  46.950133  chwent 2015_06_12  28.884511
2015-06-11 18:57:00       22.265  47.496065  chwent 2015_06_12  28.893252
2015-06-11 19:57:00       21.705  47.564962  chwent 2015_06_12  28.900743
2015-06-11 20:57:00       20.354  46.787731  chwent 2015_06_12  28.911410
2015-06-11 21:57:00       19.340  45.812337  chwent 2015_06_12  28.910823
2015-06-11 22:57:00       18.791  45.089896  chwent 2015_06_12  28.912855
2015-06-11 23:57:00       18.501  44.650920  chwent 2015_06_12  28.905499
2015-06-12 00:57:00       18.286  44.343177  chwent 2015_06_12  28.908206
2015-06-12 01:57:00       18.092  44.142718  chwent 2015_06_12  28.903132
2015-06-12 02:57:00       17.923  43.968833  chwent 2015_06_12  28.904525
2015-06-12 03:57:00       17.697  43.796261  chwent 2015_06_12  28.908218
2015-06-12 04:57:00       17.528  43.683400  chwent 2015_06_12  28.920431
2015-06-12 05:57:00       17.299  43.586287  chwent 2015_06_12  28.931586
2015-06-12 06:57:00       17.394  43.666668  chwent 2015_06_12  28.938038
2015-06-12 07:57:00       17.986  44.006563  chwent 2015_06_12  28.944520
2015-06-12 08:57:00       19.208  44.885172  chwent 2015_06_12  28.942322
2015-06-12 09:57:00       20.450  46.197508  chwent 2015_06_12  28.935349

[9575 rows x 4 columns]

Combine Output


In [ ]:
# Import every well file and keep the resulting frames keyed by well name.
q = {}

for i in wellinfo.loc[:, 'full_file_name']:
    # Fixed: getwellid and imp_new_well are snake-module functions (they
    # were called unqualified), getwellid needs the wellinfo table, and
    # imp_new_well requires the barometric record as its fourth argument.
    wellname, wellid = snake.getwellid(folder + '\\' + i, wellinfo)
    q[wellname] = snake.imp_new_well(folder + '\\' + i, wellinfo, manual, barodata)

In [ ]:
# List the imported well names.  Fixed: `q` is a plain dict, which has no
# `.names` attribute; use its keys instead.
q.keys()

In [ ]:
# NOTE(review): `ag13a` is not defined anywhere in this notebook -- it looks
# like a leftover from the Snake Valley template this notebook was adapted
# from; confirm which frame the dict of well records should be appended to.
alldf = ag13a.append(q)
# Write the combined record using the shared column order.
outpath = pathlist[0] + '\\' + pathlist[1] + '\\' + pathlist[2] + '\\' + pathlist[3] + '\\' + pathlist[4] + '\\' + 'all' + '.csv'
alldf.to_csv(outpath, index=False, columns= ["WellID","DateTime","MeasuredLevel","Temp","BaroEfficiencyCorrected","DeltaLevel",
                                             "MeasuredDTW","DriftCorrection","DTWBelowCasing","DTWBelowGroundSurface",
                                             "WaterElevation","Tape","MeasuredBy"])

In [ ]:
# NOTE(review): this path points at the Snake Valley project, not Courthouse
# Wash -- presumably a template leftover; confirm before use.
dailyavgfiles = r"U:\GWP\Snake Valley Water\Transducer Data\Individual Sites\Daily Averages"

In [ ]:
# NOTE(review): Snake Valley path (ag13b) in a Courthouse Wash notebook --
# presumably a template leftover; confirm before use.
well = r"U:\GWP\Snake Valley Water\Transducer Data\Individual Sites\Daily Averages\ag13b.xlsx"

In [ ]:
# Load the hourly record and plot a daily-mean, time-interpolated series.
hourly = pd.read_excel(well, 'input', parse_dates=True, index_col='DateTime')
hwl = hourly["WaterElevation"].resample('D', how='mean')
# Fixed: Series.interpolate takes `method=`, not `how=`; the original
# keyword was silently ignored, falling back to linear interpolation.
hwl = hwl.interpolate(method='time')
hwl.plot()

In [ ]: