In [1]:
%matplotlib inline
import os
import sys
import platform
import matplotlib
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.dates as dates
import matplotlib.ticker as tick
from matplotlib.backends.backend_pdf import PdfPages
import datetime
from pylab import rcParams
rcParams['figure.figsize'] = 15, 10
In [36]:
import loggerloader as ll
import wellapplication as wa
import arcpy
In [3]:
# Report the runtime environment so results can be tied to library versions.
print("Operating System {:} {:}".format(platform.system(), platform.release()))
print("Python Version {:}".format(sys.version))
print("Pandas Version {:}".format(pd.__version__))
print("Numpy Version {:}".format(np.__version__))
print("Matplotlib Version {:}".format(matplotlib.__version__))
print("Loggerloader Version {:}".format(ll.__version__))
In [4]:
# Root locations for the Snake Valley transducer raw-data archive.
drive = 'M:'
raw_archive_folder = '{}/PROJECTS/Snake Valley Water/Transducer Data/Raw_data_archive'.format(drive)
folder = '{}/2017/'.format(raw_archive_folder)
enteredFolder = '{}/entered/'.format(folder)   # successfully imported files
checkFolder = '{}/toCheck/'.format(folder)    # files needing manual review
# wellinfofile = drive + raw_archive_folder + '/well table 2015-03-23.csv'
In [5]:
# Make sure the destination for successfully-imported files exists.
if os.path.exists(enteredFolder):
    print('Output Folder Exists')
else:
    print('Creating Output Folder')
    os.makedirs(enteredFolder)
In [6]:
# Make sure the destination for files needing review exists.
if os.path.exists(checkFolder):
    print('Check Folder Exists')
else:
    print('Creating Check Folder')
    os.makedirs(checkFolder)
Inputs for connection file and tables
In [7]:
# ArcGIS SDE connection; arcpy uses this workspace for all table access below.
conn_file_root = "C:/Users/PAULINKENBRANDT/AppData/Roaming/ESRI/Desktop10.4/ArcCatalog/"
conn_file = "UGS_SDE.sde" #production
arcpy.env.workspace = conn_file_root + conn_file
# NOTE(review): gw_reading_table is referenced by the baro-pull loop further
# down, but it is only defined here in commented-out form -- re-enable it
# before running that cell on a fresh kernel.
#gw_reading_table = "UGGP.UGGPADMIN.UGS_GW_reading"
#station_table = "UGGP.UGGPADMIN.UGS_NGWMN_Monitoring_Locations"
Workflow: read raw files → append barometric data to the database → import well .xle files → pull barometric-pressure data from the database → remove barometric pressure → import manual measurements → correct drift → subtract stickup → apply elevation.
In [43]:
# Look up well metadata for one station from the SDE monitoring-locations table.
wellid = '43'
station_table = "UGGP.UGGPADMIN.UGS_NGWMN_Monitoring_Locations"
#arcpy.env.workspace = self.sde_conn
loc_table = "UGGP.UGGPADMIN.UGS_NGWMN_Monitoring_Locations"
field_names = ['LocationID', 'LocationName', 'LocationType', 'LocationDesc', 'AltLocationID', 'Altitude',
               'AltitudeUnits', 'WellDepth', 'SiteID', 'Offset', 'LoggerType', 'BaroEfficiency',
               'BaroEfficiencyStart', 'BaroLoggerType']
# FIX: collect rows into a list first. DataFrame.append in a loop is quadratic
# and was removed in pandas 2.0; building the frame once is equivalent and fast.
records = []
search_cursor = arcpy.da.SearchCursor(loc_table, field_names)
for row in search_cursor:
    records.append(dict(zip(field_names, row)))
df = pd.DataFrame(records, columns=field_names)
# Map well names to AltLocationID, then index the table by that ID.
iddict = dict(zip(df['LocationName'].values, df['AltLocationID'].values))
well_table = df.set_index(['AltLocationID'])
baroid = well_table.loc[wellid, 'BaroLoggerType']
stickup = well_table.loc[wellid, 'Offset']      # casing stickup above ground
well_elev = well_table.loc[wellid, 'Altitude']
be = well_table.loc[wellid, 'BaroEfficiency']   # barometric efficiency
In [40]:
Out[40]:
In [31]:
# NOTE(review): bare list literal -- this cell only echoes the field names and
# has no effect on the analysis; safe to delete.
['LocationID', 'LocationName', 'LocationType', 'LocationDesc', 'AltLocationID', 'Altitude', 'AltitudeUnits',
'WellDepth', 'SiteID', 'Offset', 'LoggerType', 'BaroEfficiency', 'BaroEfficiencyStart', 'BaroLoggerType']
In [14]:
# Print every location name in the monitoring-locations table.
fc = "UGGP.UGGPADMIN.UGS_NGWMN_Monitoring_Locations"
field = "LocationName"
cursor = arcpy.SearchCursor(fc)
# BUG FIX: the field list was ['LocationName''AltLocationID'] -- Python
# concatenates adjacent string literals, producing the single bogus field
# 'LocationNameAltLocationID'. A comma separates the two intended fields.
ll.table_to_pandas_dataframe(fc, field_names=['LocationName', 'AltLocationID'])
for row in cursor:
    print(row.getValue(field))
Out[14]:
In [15]:
# Current Windows login; handy for locating the per-user .sde connection file.
os.environ.get('USERNAME')
Out[15]:
In [16]:
# NOTE(review): `well_file` is not defined until a later cell -- this check only
# works when cells are run out of order; move it below the path definitions.
os.path.splitext(well_file)[1] == '.xle'
Out[16]:
In [14]:
# Example well/barometer transducer files for a single-well import.
well_file = folder+'pw03z 20170309.xle'
baro_file = folder+'pw03baro 20170309.xle'
#ll.imp_one_well(well_file,baro_file,)
In [ ]:
# Match raw transducer files in the folder to their well IDs, then cache the
# result so later sessions can skip the matching step.
# NOTE(review): this cell is repeated verbatim at In[13] below; keep one copy.
well_table = ll.match_files_to_wellid(folder)
#query barometers to a single table
well_table.to_pickle(folder+'well_table.pickle')
In [ ]:
# NOTE(review): appears to be a stale cell -- it duplicates the metadata lookups
# done in the search-cursor cell above, and `bpunits` is reassigned before use
# in the barometer loop further down. Verify it is still needed.
bpunits = []
baroid = well_table.loc[wellid,'BaroLoggerType']
stickup = well_table.loc[wellid,'Offset']
well_elev = well_table.loc[wellid,'Altitude']
be = well_table.loc[wellid,'BaroEfficiency']
In [13]:
# NOTE(review): verbatim duplicate of the In[ ] cell above -- rerunning it
# redoes the (slow) file matching; safe to delete one copy.
well_table = ll.match_files_to_wellid(folder)
#query barometers to a single table
well_table.to_pickle(folder+'well_table.pickle')
In [ ]:
# Overall time span of the raw files; the barometric record must cover it.
stop_times = pd.to_datetime(well_table['Stop_time'])
start_times = pd.to_datetime(well_table['Start_time'])
maxtime = stop_times.max()
mintime = start_times.min()
print('Pulling Barometric Pressure data from {:} to {:}'.format(mintime, maxtime))
In [ ]:
# Identify barometer units from the well table, then pull each barometer's
# record from the database and cache it as a pickle.
# NOTE(review): `gw_reading_table` is only defined in commented-out form in the
# connection cell above -- re-enable it before running this on a fresh kernel.
bpunits = []
for ind in well_table.index:
    is_baro = 'baro' in str(ind) or 'baro' in str(well_table.loc[ind, 'Location'])
    if is_baro:
        bpunits.append(well_table.loc[ind, 'WellID'])
baro_out = {}
for baroid in bpunits:
    # Pad the end by one day so the baro record fully covers the well records.
    data = ll.get_location_data(gw_reading_table, baroid, mintime,
                                maxtime + datetime.timedelta(days=1))
    baro_out[baroid] = data
    data.to_pickle(folder + str(baroid) + '.pickle')
In [9]:
# Reload the cached well table and the pickled barometer records.
well_table = pd.read_pickle(folder + 'well_table.pickle')
# Barometer rows are identified by 'baro' in the index name or Location field.
bpunits = [well_table.loc[ind, 'WellID'] for ind in well_table.index
           if 'baro' in str(ind) or 'baro' in str(well_table.loc[ind, 'Location'])]
baro_out = {baroid: pd.read_pickle(folder + str(baroid) + '.pickle')
            for baroid in bpunits}
In [10]:
# Manual (tape-down) water-level measurements, indexed by measurement time.
manualwls = raw_archive_folder + '/All tape measurements.csv'
manual = pd.read_csv(manualwls, index_col="DateTime", engine="python")
In [ ]:
# NOTE(review): placeholder manual-measurement values, apparently for testing.
# `ind` is not defined in this cell, so the AddMessage call only works after
# the import loop below has run at least once.
man_startdate = '1/1/2001'
man_endate = '10/11/2002'
man_start_level = 10
man_end_level =15.1
arcpy.AddMessage('Well {:} successfully imported!'.format(ind))
In [11]:
# Import each well, apply corrections, and write one diagnostic page per well
# to a single PDF (water level on the left axis, baro pressure on the right).
barocolumn = 'MEASUREDLEVEL'
dft_ln = {}
dft_st = {}
# FIX: removed the redundant in-cell `from matplotlib.backends.backend_pdf
# import PdfPages` -- it is already imported in the top import cell.
pdf_pages = PdfPages(folder + '/wells.pdf')
welltest = well_table.index.values
for ind in welltest:
    # import well file; returns corrected data, manual measurements,
    # barometric efficiency, and total drift for this well
    df, man, be, drift = ll.imp_well(well_table, ind, manual, baro_out)
    y1 = df['WATERELEVATION'].values
    y2 = df['barometer'].values
    x1 = df.index.values
    x2 = df.index.values
    x4 = man.index
    y4 = man['Meas_GW_Elev']
    fig, ax1 = plt.subplots()
    ax1.scatter(x4, y4, color='purple')
    ax1.plot(x1, y1, color='blue', label='Water Level Elevation')
    ax1.set_ylabel('Water Level Elevation', color='blue')
    ax1.set_ylim(min(df['WATERELEVATION']), max(df['WATERELEVATION']))
    y_formatter = tick.ScalarFormatter(useOffset=False)
    ax1.yaxis.set_major_formatter(y_formatter)
    ax2 = ax1.twinx()
    ax2.set_ylabel('Barometric Pressure (ft)', color='red')
    ax2.plot(x2, y2, color='red', label='Barometric pressure (ft)')
    h1, l1 = ax1.get_legend_handles_labels()
    h2, l2 = ax2.get_legend_handles_labels()
    ax1.legend(h1 + h2, l1 + l2, loc=3)
    # Pad the x-axis by three days on each side of the record.
    plt.xlim(df.first_valid_index() - datetime.timedelta(days=3),
             df.last_valid_index() + datetime.timedelta(days=3))
    plt.title('Well: {:} Drift: {:} Baro. Eff.: {:}'.format(ind, drift, be))
    pdf_pages.savefig(fig)
    # FIX: close this specific figure (plt.close() only closes the "current"
    # figure) so memory does not grow across the loop.
    plt.close(fig)
pdf_pages.close()
print("DONE!")
print("Files in " + folder + '\\wells.pdf')
In [12]:
# NOTE(review): duplicate of the tail of the loop above; re-running this cell
# after the loop raises because `pdf_pages` is already closed. Safe to delete.
pdf_pages.savefig(fig)
plt.close()
pdf_pages.close()
print("DONE!")
print("Files in "+ folder+'\\wells.pdf')
In [ ]:
def baro_eff(df, bp, wl, lag=200):
    """Estimate a well's barometric response function via lagged regression.

    Regresses first differences of water level on a lag matrix of first
    differences of barometric pressure.

    Parameters
    ----------
    df : pandas.DataFrame
        Datetime-indexed frame containing columns `bp` and `wl`.
    bp : str
        Name of the barometric-pressure column.
    wl : str
        Name of the water-level column.
    lag : int, optional
        Number of lags in the response function (default 200).

    Returns
    -------
    tuple
        (negcumls, cumls, ymod, resid, lag_time, dwl, dbp, wlls)
    """
    import statsmodels.tsa.tsatools as tools
    # FIX: work on a copy -- the original dropped NaNs in place, silently
    # mutating the caller's DataFrame.
    df = df.dropna()
    dwl = df[wl].diff().values[1:-1]
    dbp = df[bp].diff().values[1:-1]
    # Elapsed time (days) since the first sample, used as the lag axis.
    jd = pd.Series(df.index.to_julian_date(), index=df.index)
    lag_time = jd.diff().cumsum().values[1:-1]
    # Create the lag matrix for the regression ('in' keeps the original column).
    bpmat = tools.lagmat(dbp, lag, original='in')
    # FIX: rcond=None selects numpy's modern default and silences the
    # FutureWarning the bare call emits.
    sqrd = np.linalg.lstsq(bpmat, dwl, rcond=None)
    wlls = sqrd[0]
    cumls = np.cumsum(wlls)
    negcumls = [-1 * cumls[i] for i in range(len(cumls))]
    ymod = np.dot(bpmat, wlls)
    # Residual of water-level differences after removing the BP response.
    resid = [(dwl[i] - ymod[i]) for i in range(len(dwl))]
    # NOTE(review): lag_trim was computed but never returned in the original;
    # kept in case the truncated lag axis was the intended output -- confirm.
    lag_trim = lag_time[0:len(cumls)]
    return negcumls, cumls, ymod, resid, lag_time, dwl, dbp, wlls
In [ ]:
# Inspect the barometric response of the currently loaded well record.
baro_eff(df,'barometer','Level')
In [ ]:
# plot data
# Diagnostic plot of the manually BE-corrected series ('cor2') against
# barometric pressure. NOTE(review): relies on df/man/ind/drift/be left over
# from the import loop above -- fails on a fresh kernel.
wl = 'cor2'
y1 = df[wl].values
y2 = df['barometer'].values
x1 = df.index.values
x2 = df.index.values
x4 = man.index
y4 = man['Meas_GW_Elev']
fig, ax1 = plt.subplots()
ax1.scatter(x4,y4,color='purple')
ax1.plot(x1,y1,color='blue',label='Water Level Elevation')
ax1.set_ylabel('Water Level Elevation',color='blue')
ax1.set_ylim(min(df[wl]),max(df[wl]))
y_formatter = tick.ScalarFormatter(useOffset=False)
ax1.yaxis.set_major_formatter(y_formatter)
ax2 = ax1.twinx()
ax2.set_ylabel('Barometric Pressure (ft)', color='red')
ax2.plot(x2,y2,color='red',label='Barometric pressure (ft)')
h1, l1 = ax1.get_legend_handles_labels()
h2, l2 = ax2.get_legend_handles_labels()
ax1.legend(h1+h2, l1+l2, loc=3)
# Pad the x-axis by three days on each side of the record.
plt.xlim(df.first_valid_index()-datetime.timedelta(days=3),df.last_valid_index()+datetime.timedelta(days=3))
plt.title('Well: {:} Drift: {:} Baro. Eff.: {:}'.format(ind,drift,be))
In [ ]:
# Apply a fixed 25% barometric-efficiency correction to the raw level.
# FIX: vectorized form of the original row-wise `apply(lambda x: x[0] +
# 0.25 * x[1], 1)` -- identical result, much faster, and not affected by the
# positional-apply deprecations in newer pandas.
df['cor2'] = df['Level'] + 0.25 * df['barometer']
In [ ]:
# First barometer reading. NOTE(review): prefer df['barometer'].iloc[0] --
# positional [] access on a Series is deprecated in modern pandas.
df['barometer'][0]
In [ ]:
# Scatter of first differences (water level vs barometric pressure) plus an
# OLS fit: the slope approximates the barometric efficiency.
plt.scatter(df['Level'].diff(), df['barometer'].diff())
import statsmodels.api as sm
df['dbp'] = df['barometer'].diff()
df['dwl'] = df['corrwl'].diff()
df1 = df.dropna(subset=['dbp', 'dwl'])
x = df1['dbp']
y = df1['dwl']
X = sm.add_constant(x)
model = sm.OLS(y, X).fit()
# y_reg = [data.ix[i,'Sbp']*m+b for i in range(len(data['Sbp']))]
# intercept, slope, and goodness of fit from the fitted model
b, m = model.params[0], model.params[1]
r = model.rsquared
print(m, r)
In [ ]:
# Clark's-method estimate of barometric efficiency over a 100-sample window.
be, intc, r = ll.clarks(df[500:600],'barometer','corrwl')
print(be,intc,r)
In [ ]:
# NOTE(review): `lgld` is never imported in this notebook (possibly an older
# alias for loggerloader); these cells fail on a fresh kernel.
#baro_twin = lgld.compilation("H:/SnakeValley/Wetlands/Baro/1044788*")
baro_leland = lgld.compilation("H:/SnakeValley/Wetlands/Baro/*1044779*")
In [ ]:
# NOTE(review): `lgld` is never imported -- see the note on the cell above.
baro_twin = lgld.compilation("H:/SnakeValley/Wetlands/Baro/*1044788*")
In [ ]:
# NOTE(review): `xle_head_table` is not defined or imported here -- presumably
# it belongs to loggerloader/wellapplication; qualify it before running.
xlefiles = xle_head_table("H:/SnakeValley/Wetlands/Baro/")
In [ ]:
# Name the index of the header table after the file it came from.
#xlefiles.index = xlefiles.index.droplevel(level=1)
xlefiles.index.name = 'filename'
In [ ]:
# Newly downloaded barometer .xle files to append to the running CSV records.
pw03baro_append = folder + "\\pw03 baro 2016-08-03.xle"
pw10baro_append = folder + "\\pw10 baro 2016-08-03.xle"
pw19baro_append = folder + "\\pw19 baro 2016-08-04.xle"
In [ ]:
# Running CSV records of each barometer station in the archive.
pw03baro = raw_archive_folder + "\\pw03baro.csv"
pw10baro = raw_archive_folder + "\\pw10baro.csv"
pw19baro = raw_archive_folder + "\\pw19baro.csv"
In [ ]:
# Append each newly downloaded .xle file to its station's running CSV record.
wa.appendomatic(pw03baro_append,pw03baro)
wa.appendomatic(pw10baro_append,pw10baro)
wa.appendomatic(pw19baro_append,pw19baro)
In [ ]:
# duplicated to update changes made by appendomatic
# Re-read the CSVs so the in-memory frames include the rows just appended
# (the names pw03baro/pw10baro/pw19baro change from path strings to frames).
pw03baro = pd.read_csv(raw_archive_folder + "\\pw03baro.csv",index_col='DateTime',parse_dates=True)
pw10baro = pd.read_csv(raw_archive_folder + "\\pw10baro.csv",index_col='DateTime',parse_dates=True)
pw19baro = pd.read_csv(raw_archive_folder + "\\pw19baro.csv",index_col='DateTime',parse_dates=True)
In [ ]:
# Resample each barometer record to an hourly time step.
# NOTE(review): `lgld` is never imported in this notebook -- see earlier note.
pw03baro = lgld.hourly_resample(pw03baro)
pw10baro = lgld.hourly_resample(pw10baro)
pw19baro = lgld.hourly_resample(pw19baro)
In [ ]:
# Align the three barometer records on time. The outer joins keep every
# timestamp; dropping NaN rows then leaves only times all stations share.
baro = pd.merge(pw03baro, pw10baro, how="outer", left_index=True, right_index=True)
baro = pd.merge(baro, pw19baro, how="outer", left_index=True, right_index=True)
baro = baro.dropna(axis=0)
baro['integr'] = 0  # placeholder column for vented transducers
In [ ]:
# Quick visual sanity check of the three merged barometer series.
baro[['pw03','pw10','pw19']].plot()
In [ ]:
# Temperature trace for the pw03 barometer record.
pw03baro['Temperature'].plot()
In [ ]:
In [ ]:
In [ ]:
In [ ]:
# Lookup tables tying barometer station names / short codes to well IDs.
barodict = {
    "PW10 Barometer": 9027,
    "PW19 Barometer": 9049,
    "SG25 Barometer": 9061,
    "Leland-Harris Barometer": 9025,
    "Twin Springs Barometer": 9024,
    "PW03 Barometer": 9003,
}
# Short-code variant; note the IDs here are strings, not ints.
bpdict = {'pw03': '9003', 'pw10': '9027', 'pw19': '9049',
          'twin': '9024', 'leland': '9025'}
In [ ]:
In [ ]:
# Persist the merged barometric record for later cells / sessions.
baro.to_csv(raw_archive_folder + '\\baro.csv')
In [ ]:
# NOTE(review): bare tuple of column-name strings -- this cell has no effect
# on the analysis; it appears to be a leftover snippet. Safe to delete.
'ReadingID',
'WellID',
'DateTime',
'MeasuredLevel',
'Temp',
In [ ]:
# Build a manual-measurement table shaped like the transducer reading table so
# recent tape-downs can be written alongside transducer data.
manualwls = raw_archive_folder + "\\All tape measurements.csv"
manual = pd.read_csv(manualwls, index_col="DateTime", engine="python")
# FIX: pd.to_datetime replaces Index.to_datetime() (removed from pandas) and
# pd.Timestamp replaces the deprecated pd.datetime alias.
manualrecent = manual[pd.to_datetime(manual.index) > pd.Timestamp(2015, 6, 1)]
manualrecent = manualrecent.dropna()
manualrecent = manualrecent.reset_index()
print(manualrecent.dtypes)  # FIX: print() -- was a Python 2 print statement
manualrecent = pd.merge(manualrecent, wellinfo, how='left', left_on='WellID', right_index=True)
# Columns the transducer pipeline fills but a tape measurement cannot.
for col in ["MeasuredLevel", "Temp", "BaroEfficiencyCorrected", "DeltaLevel",
            "DriftCorrection", "MeasuredBy"]:
    manualrecent.loc[:, col] = np.nan
manualrecent.loc[:, "Tape"] = 1  # flag marking these rows as tape measurements
manualrecent.loc[:, "DTWBelowGroundSurface"] = np.nan
manualrecent.loc[:, "WaterElevation"] = np.nan
#manualrecent["DTWBelowGroundSurface"] = np.nan
# Depth-to-water conversions: casing datum -> ground surface -> elevation.
manualrecent.loc[:, "DTWBelowCasing"] = manualrecent.loc[:, "MeasuredDTW"]
manualrecent.loc[:, "DTWBelowGroundSurface"] = manualrecent.loc[:, "MeasuredDTW"] - manualrecent.loc[:, "Offset"]
manualrecent.loc[:, "WaterElevation"] = manualrecent.loc[:, 'GroundElevation'] - manualrecent.loc[:, "DTWBelowGroundSurface"]
print(manualrecent)  # FIX: print() -- was a Python 2 print statement
#outpath = pathlist[0] + '\\' + pathlist[1] + '\\' + pathlist[2] + '\\' + pathlist[3] + '\\' + pathlist[4] + '\\' + 'Manual' + '.csv'
# BUG FIX: the original concatenated raw_archive_folder + 'Manual.csv' with no
# path separator, writing the file *next to* the archive folder, not inside it.
manualrecent.to_csv(raw_archive_folder + '/Manual.csv', index=True,
                    columns=["WellID", "DateTime", "MeasuredLevel", "Temp", "BaroEfficiencyCorrected",
                             "DeltaLevel", "MeasuredDTW", "DriftCorrection", "DTWBelowCasing",
                             "DTWBelowGroundSurface", "WaterElevation", "Tape", "MeasuredBy"])
In [ ]:
# Export all manual measurements in the reading-table column layout.
# FIX: pd.to_datetime replaces the removed Index.to_datetime(); the '/' fixes
# the missing path separator in the output file name (same bug as above).
manual['DateTime'] = pd.to_datetime(manual.index)
manual.to_csv(raw_archive_folder + '/Manual.csv', index=False,
              columns=["WellID", "DateTime", "MeasuredLevel", "Temp", "BaroEfficiencyCorrected",
                       "DeltaLevel", "MeasuredDTW", "DriftCorrection", "DTWBelowCasing",
                       "DTWBelowGroundSurface", "WaterElevation", "Tape", "MeasuredBy"])
In [ ]:
# List the raw-file name recorded for every well.
# FIX: print() -- the original was a Python 2 print statement, a SyntaxError
# under the Python 3 kernel the rest of this notebook uses.
print(wellinfo.loc[:, 'full_file_name'])
In [ ]:
# Reload manual measurements, the compiled barometric record, and well metadata.
manualwls = raw_archive_folder + '/All tape measurements.csv'
manual = pd.read_csv(manualwls, index_col="DateTime", engine="python")
barofile = raw_archive_folder + '/baro.csv'
baro = pd.read_csv(barofile,index_col=0, parse_dates=True)
wellinfo = pd.read_csv(folder + '/wellinfo4.csv')
In [ ]:
# Database engine for the groundwater schema.
# NOTE(review): `engineGetter` is never imported in this notebook -- confirm
# which module provides it before running.
engine = engineGetter.getEngine()
In [ ]:
# ipywidgets form for selecting wells, a PDF name, and a drift tolerance.
# NOTE(review): `widgets` and `display` are never imported here (needs
# `import ipywidgets as widgets` and `from IPython.display import display`),
# and downstream results depend on this interactive state.
form = widgets.VBox()
welllist = ["pw07mx 2015-11-30.csv","pw02a 11-30-2015.xle","pw02a 2016-07-07.xle","sg23b 2016-05-02.xle"]
welllist = welllist+ list(wellinfo.loc[:,'full_file_name'].values)
#print welllist
wells = widgets.SelectMultiple(description="Well", options=welllist, padding=4)
pdfName = widgets.Text(description="PDF:",padding=4)
driftTol = widgets.FloatSlider(value=0.05, min=0.00, max=10.0, step=0.05, description='Drift Tolerance:')
form.children = [wells, pdfName, driftTol]
display(form)
In [ ]:
# Preview each selected well: import, correct, and plot (no database writes).
for i in wells.value:
    print(folder + '/' + i)  # FIX: print() -- was a Python 2 print statement
    inputfile = folder + '/' + i
    g, drift, wellname = svdi.imp_new_well(inputfile, wellinfo, manual, baro)
    glist = g.columns.tolist()
    y1 = g['WaterElevation'].values
    y2 = baro['pw03'].values
    x1 = g.index.values
    x2 = baro.index.values
    wellname, wellid = svdi.getwellid(folder + '\\' + i, wellinfo)
    # Bracket the record with the manual measurements closest to its start and
    # end (svdi.fcl = find closest).
    # FIX: pd.to_datetime(g.index) replaces g.index.to_datetime(), which was
    # removed from pandas.
    wi = wellinfo[wellinfo['WellID'] == wellid]
    ylast = (wi['GroundElevation'].values[0] + wi['Offset'].values[0]
             - svdi.fcl(manual[manual['WellID'] == wellid], max(pd.to_datetime(g.index)))[1])
    yfirst = (wi['GroundElevation'].values[0] + wi['Offset'].values[0]
              - svdi.fcl(manual[manual['WellID'] == wellid], min(pd.to_datetime(g.index)))[1])
    xlast = pd.to_datetime((svdi.fcl(manual[manual['WellID'] == wellid], max(pd.to_datetime(g.index)))).name)
    xfirst = pd.to_datetime((svdi.fcl(manual[manual['WellID'] == wellid], min(pd.to_datetime(g.index)))).name)
    x4 = [xfirst, xlast]
    y4 = [yfirst, ylast]
    fig, ax1 = plt.subplots()
    ax1.scatter(x4, y4, color='purple')
    ax1.plot(x1, y1, color='blue', label='Water Level Elevation')
    ax1.set_ylabel('Water Level Elevation', color='blue')
    y_formatter = tick.ScalarFormatter(useOffset=False)
    ax1.yaxis.set_major_formatter(y_formatter)
    ax2 = ax1.twinx()
    ax2.set_ylabel('Barometric Pressure (ft)', color='red')
    ax2.plot(x2, y2, color='red', label='Barometric pressure (ft)')
    h1, l1 = ax1.get_legend_handles_labels()
    h2, l2 = ax2.get_legend_handles_labels()
    ax1.legend(h1 + h2, l1 + l2, loc=3)
    # FIX: bare `timedelta` was never imported (only the datetime module is);
    # qualify it so the cell runs on a fresh kernel.
    plt.xlim(xfirst - datetime.timedelta(days=3), xlast + datetime.timedelta(days=3))
    plt.title('Well: ' + wellname.title() + ' ' + 'Total Drift = ' + str(g['DriftCorrection'][-1]))
In [ ]:
# NOTE(review): `env` is not imported (only arcpy is); use arcpy.env.workspace
# or add `from arcpy import env` before running this cell.
env.workspace = "C:/Users/PAULINKENBRANDT/AppData/Roaming/ESRI/Desktop10.4/ArcCatalog/UEMP_Dev.sde"
read_table = "UEMP_Dev.UEMPADMIN.GW_reading"
arcpy.env.overwriteOutput=True
# Open an edit session so rows can be inserted into the versioned SDE table.
edit = arcpy.da.Editor(env.workspace)
edit.startEditing(False, True)
edit.startOperation()
In [ ]:
# Import each selected well. Wells within the drift tolerance are written to
# the database and the entered/ folder; others go to toCheck/ for review.
# Every well gets one diagnostic page in the named PDF.
pdf_pages = PdfPages(folder + '/' + pdfName.value + '.pdf')
for i in wells.value:
    print(folder + '/' + i)  # FIX: print() -- was a Python 2 print statement
    inputfile = folder + '/' + i
    g, drift, wellname = svdi.imp_new_well(inputfile, wellinfo, manual, baro)
    # Query for any reading after this record's last timestamp (duplicate guard).
    # NOTE(review): SQL built by string concatenation; values are internally
    # generated here, but a parameterized query would be safer.
    quer = ("SELECT * FROM groundwater.reading where WellID = " + str(g['WellID'].values[0])
            + " and DateTime > \'" + str(g.index.values[-1])[0:10] + " "
            + str(g.index.values[-1])[11:19] + "\'")
    if abs(float(drift)) < driftTol.value:
        if len(pd.read_sql_query(sql=quer, con=engine)) < 1:
            g.to_csv(enteredFolder + wellname + ".csv", index=False)
            tablename = 'reading'
            g.to_sql(con=engine, name=tablename, if_exists='append', flavor='mysql', index=False)
            print("Added to DB table " + tablename)
        else:
            print("Already Entered")
            print(len(pd.read_sql_query(sql=quer, con=engine)))
    else:
        g.to_csv(checkFolder + wellname + ".csv", index=False,
                 columns=["WellID", "DateTime", "MeasuredLevel", "Temp", "BaroEfficiencyCorrected",
                          "DeltaLevel", "MeasuredDTW", "DriftCorrection", "DTWBelowCasing",
                          "DTWBelowGroundSurface", "WaterElevation", "Tape", "MeasuredBy"])
        print("Check File")
    glist = g.columns.tolist()
    # Remember the name of the barometer column attached to this well (if any).
    for j in range(len(glist)):
        if 'pw' in glist[j]:
            h = glist[j]
    y1 = g['WaterElevation'].values
    y2 = baro['pw03'].values
    x1 = g.index.values
    x2 = baro.index.values
    wellname, wellid = svdi.getwellid(folder + '\\' + i, wellinfo)
    # Bracket the record with the closest manual measurements (fcl = find closest).
    wi = wellinfo[wellinfo['WellID'] == wellid]
    ylast = (wi['GroundElevation'].values[0] + wi['Offset'].values[0]
             - svdi.fcl(manual[manual['WellID'] == wellid], max(pd.to_datetime(g.index)))[1])
    yfirst = (wi['GroundElevation'].values[0] + wi['Offset'].values[0]
              - svdi.fcl(manual[manual['WellID'] == wellid], min(pd.to_datetime(g.index)))[1])
    xlast = pd.to_datetime(svdi.fcl(manual[manual['WellID'] == wellid], max(pd.to_datetime(g.index))).name)
    xfirst = pd.to_datetime(svdi.fcl(manual[manual['WellID'] == wellid], min(pd.to_datetime(g.index))).name)
    x4 = [xfirst, xlast]
    y4 = [yfirst, ylast]
    fig, ax1 = plt.subplots()
    ax1.scatter(x4, y4, color='purple')
    ax1.plot(x1, y1, color='blue', label='Water Level Elevation')
    ax1.set_ylabel('Water Level Elevation', color='blue')
    y_formatter = tick.ScalarFormatter(useOffset=False)
    ax1.yaxis.set_major_formatter(y_formatter)
    ax2 = ax1.twinx()
    ax2.set_ylabel('Barometric Pressure (ft)', color='red')
    ax2.plot(x2, y2, color='red', label='Barometric pressure (ft)')
    h1, l1 = ax1.get_legend_handles_labels()
    h2, l2 = ax2.get_legend_handles_labels()
    ax1.legend(h1 + h2, l1 + l2, loc=3)
    # FIX: bare `timedelta` was never imported (only the datetime module is).
    plt.xlim(xfirst - datetime.timedelta(days=3), xlast + datetime.timedelta(days=3))
    plt.title('Well: ' + wellname.title() + ' ' + 'Total Drift = ' + str(g['DriftCorrection'][-1]))
    pdf_pages.savefig(fig)
    # FIX: close this specific figure so memory does not grow across the loop.
    plt.close(fig)
pdf_pages.close()
print("DONE!")
print("Files in " + folder + '\\wells.pdf')
# FIX: the datetime *module* is imported, so now() lives at datetime.datetime.
print(datetime.datetime.now())
In [ ]:
# Minimal ipywidgets form exposing only the drift tolerance.
# NOTE(review): `widgets` and `display` are never imported in this notebook.
form = widgets.VBox()
driftTol = widgets.FloatSlider(value=0.05, min=0.00, max=10.0, step=0.05, description='Drift Tolerance:')
form.children = [driftTol]
display(form)
In [ ]:
# Batch variant of the loop above: process every well listed in wellinfo
# rather than an interactive selection; same tolerance/DB/PDF handling.
pdf_pages = PdfPages(folder + '/wells.pdf')
for i in wellinfo.loc[:, 'full_file_name']:
    print(folder + '/' + i)  # FIX: print() -- was a Python 2 print statement
    inputfile = folder + '/' + i
    g, drift, wellname = svdi.imp_new_well(inputfile, wellinfo, manual, baro)
    # Duplicate guard: any reading on/after the record's last date.
    # NOTE(review): SQL built by string concatenation; a parameterized query
    # would be safer.
    quer = ("SELECT * FROM groundwater.reading where WellID = " + str(g['WellID'].values[0])
            + " and (DateTime >= \'" + str(g.index.values[-1])[0:10] + "\')")
    if abs(float(drift)) < driftTol.value:
        if len(pd.read_sql_query(sql=quer, con=engine)) < 1:
            g.to_csv(enteredFolder + wellname + ".csv", index=False)
            g.to_sql(con=engine, name='reading', if_exists='append', flavor='mysql', index=False)
            print("Added to DB")
        else:
            print("Already Entered")
    else:
        g.to_csv(checkFolder + wellname + ".csv", index=False,
                 columns=["WellID", "DateTime", "MeasuredLevel", "Temp", "BaroEfficiencyCorrected",
                          "DeltaLevel", "MeasuredDTW", "DriftCorrection", "DTWBelowCasing",
                          "DTWBelowGroundSurface", "WaterElevation", "Tape", "MeasuredBy"])
        print("Check File")
    glist = g.columns.tolist()
    # Remember the name of the barometer column attached to this well (if any).
    for j in range(len(glist)):
        if 'pw' in glist[j]:
            h = glist[j]
    y1 = g['WaterElevation'].values
    y2 = baro['pw03'].values
    x1 = g.index.values
    x2 = baro.index.values
    wellname, wellid = svdi.getwellid(folder + '\\' + i, wellinfo)
    # Bracket the record with the closest manual measurements (fcl = find closest).
    wi = wellinfo[wellinfo['WellID'] == wellid]
    ylast = (wi['GroundElevation'].values[0] + wi['Offset'].values[0]
             - svdi.fcl(manual[manual['WellID'] == wellid], max(pd.to_datetime(g.index)))[1])
    yfirst = (wi['GroundElevation'].values[0] + wi['Offset'].values[0]
              - svdi.fcl(manual[manual['WellID'] == wellid], min(pd.to_datetime(g.index)))[1])
    xlast = pd.to_datetime(svdi.fcl(manual[manual['WellID'] == wellid], max(pd.to_datetime(g.index))).name)
    xfirst = pd.to_datetime(svdi.fcl(manual[manual['WellID'] == wellid], min(pd.to_datetime(g.index))).name)
    x4 = [xfirst, xlast]
    y4 = [yfirst, ylast]
    fig, ax1 = plt.subplots()
    ax1.scatter(x4, y4, color='purple')
    ax1.plot(x1, y1, color='blue', label='Water Level Elevation')
    ax1.set_ylabel('Water Level Elevation', color='blue')
    y_formatter = tick.ScalarFormatter(useOffset=False)
    ax1.yaxis.set_major_formatter(y_formatter)
    ax2 = ax1.twinx()
    ax2.set_ylabel('Barometric Pressure (ft)', color='red')
    ax2.plot(x2, y2, color='red', label='Barometric pressure (ft)')
    h1, l1 = ax1.get_legend_handles_labels()
    h2, l2 = ax2.get_legend_handles_labels()
    ax1.legend(h1 + h2, l1 + l2, loc=3)
    # FIX: bare `timedelta` was never imported (only the datetime module is).
    plt.xlim(min(pd.to_datetime(g.index)) - datetime.timedelta(days=3),
             max(pd.to_datetime(g.index)) + datetime.timedelta(days=3))
    plt.title('Well: ' + wellname.title() + ' ' + 'Total Drift = ' + str(g['DriftCorrection'][-1]))
    pdf_pages.savefig(fig)
    # FIX: close this specific figure so memory does not grow across the loop.
    plt.close(fig)
pdf_pages.close()
print("DONE!")
print("Files in " + folder + '\\wells.pdf')