In [1]:
%matplotlib inline
import os
import sys
import platform
import matplotlib

import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.dates as dates
import matplotlib.ticker as tick
from matplotlib.backends.backend_pdf import PdfPages
import datetime
from pylab import rcParams
rcParams['figure.figsize'] = 15, 10

In [36]:
import loggerloader as ll
import wellapplication as wa
import arcpy

In [3]:
print("Operating System " + platform.system() + " " + platform.release())
print("Python Version " + str(sys.version))
print("Pandas Version " + str(pd.__version__))
print("Numpy Version " + str(np.__version__))
print("Matplotlib Version " + str(matplotlib.__version__))
print("Loggerloader Version {:}".format(ll.__version__))


Operating System Windows 7
Python Version 3.5.3 |Continuum Analytics, Inc.| (default, May 15 2017, 10:43:23) [MSC v.1900 64 bit (AMD64)]
Pandas Version 0.20.1
Numpy Version 1.11.2
Matplotlib Version 1.5.3
Loggerloader Version 0.2.5

In [4]:
drive = 'M:'
raw_archive_folder = drive + '/PROJECTS/Snake Valley Water/Transducer Data/Raw_data_archive'
folder = raw_archive_folder + '/2017/'
enteredFolder = folder + '/entered/'
checkFolder = folder + '/toCheck/'
#wellinfofile = drive + raw_archive_folder + '/well table 2015-03-23.csv'

In [5]:
if not os.path.exists(enteredFolder):
    print('Creating Output Folder')
    os.makedirs(enteredFolder)
else:
    print('Output Folder Exists')


Output Folder Exists

In [6]:
if not os.path.exists(checkFolder):
    print('Creating Check Folder')
    os.makedirs(checkFolder)
else:
    print('Check Folder Exists')


Check Folder Exists

Inputs for connection file and tables


In [7]:
conn_file_root = "C:/Users/PAULINKENBRANDT/AppData/Roaming/ESRI/Desktop10.4/ArcCatalog/"
conn_file = "UGS_SDE.sde" #production
arcpy.env.workspace = conn_file_root + conn_file
#gw_reading_table = "UGGP.UGGPADMIN.UGS_GW_reading"
#station_table =  "UGGP.UGGPADMIN.UGS_NGWMN_Monitoring_Locations"

Snake Valley Data Import

read over raw files -> append baro to db -> import well xle -> pull bp data from db -> remove bp press -> import manual measurements -> fix drift -> remove stickup -> include elevation

Import Barometric Data into Database


In [43]:
wellid = '43'
station_table =  "UGGP.UGGPADMIN.UGS_NGWMN_Monitoring_Locations"
#arcpy.env.workspace = self.sde_conn
loc_table = "UGGP.UGGPADMIN.UGS_NGWMN_Monitoring_Locations"

field_names = ['LocationID', 'LocationName', 'LocationType', 'LocationDesc', 'AltLocationID', 'Altitude', 
               'AltitudeUnits', 'WellDepth', 'SiteID', 'Offset', 'LoggerType', 'BaroEfficiency', 
               'BaroEfficiencyStart', 'BaroLoggerType']
df = pd.DataFrame(columns=field_names)

# use a search cursor to iterate rows
search_cursor = arcpy.da.SearchCursor(loc_table, field_names)

    # iterate the rows
for row in search_cursor:
        # combine the field names and row items together, and append them
    df = df.append(dict(zip(field_names, row)), ignore_index=True)
    
iddict = dict(zip(df['LocationName'].values,df['AltLocationID'].values))

well_table = df.set_index(['AltLocationID'])
baroid = well_table.loc[wellid,'BaroLoggerType']
stickup = well_table.loc[wellid,'Offset']
well_elev = well_table.loc[wellid,'Altitude']
be = well_table.loc[wellid,'BaroEfficiency']

In [40]:



Out[40]:
LocationID LocationName LocationType LocationDesc AltLocationID Altitude AltitudeUnits WellDepth SiteID Offset LoggerType BaroEfficiency BaroEfficiencyStart BaroLoggerType
0 393453113515501 P1001 Well Miller Spring Piezometer 3 1001 4760.66 None 3.6 40 None None 0.00 None 9061
1 393452113515501 P1002 Well Miller Spring Piezometer 2 1002 4761.89 None 4 40 None None 0.00 None 9061
2 393454113515501 P1003 Well Miller Spring Piezometer 4 1003 4760.67 None 3.8 40 None None 0.00 None 9061
3 393450113515501 P1004 Well Miller Spring Piezometer 1 1004 4761.41 None 3.8 40 None None 0.00 None 9061
4 393501113515601 P1005 Well Miller Spring Piezometer 5 1005 4760.68 None 4 40 None None 0.00 None 9061
5 393500113515601 P1006 Well Miller Spring Piezometer 6 1006 4759.73 None 3.8 40 None None 0.00 None 9061
6 393501113515701 P1007 Well Miller Spring Piezometer 7 1007 4759.88 None 3.8 40 None None 0.00 None 9061
7 393506113515701 P1008 Well Miller Spring Piezometer 9 1008 4759.11 None 3.9 40 None None 0.00 None 9061
8 393506113515601 P1009 Well Miller Spring Piezometer 10 1009 4759.00 None 4 40 None None 0.00 None 9061
9 393506113515801 P1010 Well Miller Spring Piezometer 8 1010 4759.58 None 3.9 40 None None 0.00 None 9061
10 393329113532901 P1011 Well Leland-Harris Piezometer 2 1011 4781.28 None 4.1 59 None None 0.00 None 9061
11 393330113532901 P1012 Well Leland-Harris Piezometer 1 1012 4781.63 None 3.8 59 None None 0.00 None 9061
12 393326113533101 P1013 Well Leland-Harris Piezometer 4 1013 4781.31 None 4.1 59 None None 0.00 None 9061
13 393328113533101 P1014 Well Leland-Harris Piezometer 3 1014 4779.06 None 3.7 59 None None 0.00 None 9061
14 393328113531701 P1015 Well Leland-Harris Piezometer 5 1015 4780.00 None 4.1 60 None None 0.00 None 9061
15 393328113531601 P1016 Well Leland-Harris Piezometer 6 1016 4777.50 None 2.7 60 None None 0.00 None 9061
16 393331113531801 P1017 Well Leland-Harris Piezometer 7 1017 4778.59 None 3.9 60 None None 0.00 None 9061
17 393333113531801 P1018 Well Leland-Harris Piezometer 8 1018 4778.60 None 4.2 60 None None 0.00 None 9061
18 393335113531901 P1019 Well Leland-Harris Piezometer 9 1019 4778.36 None 3.9 60 None None 0.00 None 9061
19 393337113531901 P1020 Well Leland-Harris Piezometer 10 1020 4780.08 None 4.2 60 None None 0.00 None 9061
20 392451113523201 P1021 Well Foote Reservoir Piezometer 1 1021 4814.01 None 3.8 -1 None None 0.00 None 9061
21 392450113523201 P1022 Well Foote Reservoir Piezometer 2 1022 4812.49 None 3.9 -1 None None 0.00 None 9061
22 392448113523401 P1023 Well Foote Reservoir Piezometer 3 1023 4812.54 None 3.7 -1 None None 0.00 None 9061
23 392449113524601 P1024 Well Foote Reservoir Piezometer 4 1024 4808.56 None 3.9 -1 None None 0.00 None 9061
24 392449113524801 P1025 Well Foote Reservoir Piezometer 5 1025 4809.78 None 4 -1 None None 0.00 None 9061
25 392449113524901 P1026 Well Foote Reservoir Piezometer 6 1026 4807.63 None 3.9 -1 None None 0.00 None 9061
26 392907113545801 P1027 Well Gandy Salt Marsh Piezometer 1 1027 4794.18 None 4.1 53 None None 0.00 None 9061
27 392907113545901 P1028 Well Gandy Salt Marsh Piezometer 2 1028 4792.67 None 4.3 53 None None 0.00 None 9061
28 392906113545801 P1029 Well Gandy Salt Marsh Piezometer 3 1029 4793.25 None 4.2 53 None None 0.00 None 9061
29 392904113545501 P1030 Well Gandy Salt Marsh Piezometer 4 1030 4791.79 None 3.7 53 None None 0.00 None 9061
... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
336 405512111193801 WRSMB Well None 131 5835.00 None None 49 -0.14 None 0.00 None None
337 405511111193901 WLSMB Well None 132 5827.00 None None 49 -0.38 None 0.00 None None
338 405511111193701 W-5 Well None 133 5852.00 None None 49 -0.13 None 0.00 None None
339 404745111570001 AE-HW1 Well None 500 4211.00 None None 102 None None 0.00 None None
340 404744111570001 AE-HW2 Well None 501 4211.00 None None 102 None None 0.00 None None
341 404744111570101 AE-FW1 Well None 502 4213.00 None None 102 None None 0.00 None None
342 384702114041601 Snake Valley South MX Well None 71 5581.90 None None 30 2.15 None 0.00 None 9003
343 390812114033901 Snake Valley North MX Well None 70 5096.10 None None 29 2.13 Global Water until 6/8/10, then Solinst 0.00 None 9003
344 404938110055001 None Well None 503 10469.00 None None 103 None None 0.00 None None
345 404938110054901 None Well None 504 10465.00 None None 103 None None 0.00 None None
346 404938110055002 None Well None 505 10469.00 None None 103 None None 0.00 None None
347 404938110054902 None Well None 506 10465.00 None None 103 None None 0.00 None None
348 404938110054903 None Well None 507 10465.00 None None 103 None None 0.00 None None
349 404943110055003 None Well None 508 10334.00 None None 103 None None 0.00 None None
350 384620114024901 Big Springs Creek above Dearden Ranch Spring at Burbank UT 1 5438.00 None None 36 None None 0.00 None None
351 384636114024101 Highline Ditch Spring at Burbank UT 2 5426.00 None None 36 None None 0.00 None None
352 384638114023401 East Middle Ditch Spring at Burbank UT 3 5418.00 None None 36 None None 0.00 None None
353 384641114023402 West Middle Ditch Spring at Burbank UT 4 5420.00 None None 36 None None 0.00 None None
354 384631114024501 Dearden Springs Spring at Burbank UT 5 5424.00 None None 36 None None 0.20 None None
355 385156113593701 Clay Spring Spring 5.5 miles SSE of Garrison UT 6 5440.00 None None 37 None None 0.00 None None
356 391814114003001 Kell (Beck) Spring Spring 10 miles south of Gandy UT 7 4939.00 None None 39 None None 0.20 None 9003
357 392411113514601 Twin Springs south Spring 7.4 miles ESE of Gandy UT 8 4819.00 None None 73 None None 0.00 None 9061
358 392413113515001 Twin Springs north Spring 7.4 miles ESE of Gandy UT 9 4819.00 None None 73 None None 0.33 None 9061
359 392456113521301 Foote Reservoir flume Spring 6.7 miles ESE of Gandy UT 10 4827.00 None None 38 None None 0.33 None None
360 392548113531701 Pivot north of Foote Reservoir Spring 5.5 miles ESE of Gandy UT 11 4824.00 None None 38 None None 0.10 None None
361 393448113515401 Miller Spring Spring 4.5 miles SE of Partoun UT 12 4771.00 None None 40 None None 0.25 None None
362 392459113521302 Foote Reservoir Spring (combined flow) Spring 6.7 miles ESE of Gandy UT 13 4833.00 None None 38 None None 0.30 None None
363 391926113391803 PW10 Baro None Barometer at PW10 9027 5266.30 None None -1 None Solinst 0.00 None None
364 393803113161602 PW19 Baro None PW19 Barometer 9049 4633.00 None None -1 None None 0.00 None None
365 393331113533004 SG25 Barometer None SG25 Barometer 9061 4784.00 None None -1 None None 0.00 None None

366 rows × 14 columns


In [31]:
['LocationID', 'LocationName', 'LocationType', 'LocationDesc', 'AltLocationID', 'Altitude', 'AltitudeUnits',
 'WellDepth', 'SiteID', 'Offset', 'LoggerType', 'BaroEfficiency', 'BaroEfficiencyStart', 'BaroLoggerType']


---------------------------------------------------------------------------
TypeError                                 Traceback (most recent call last)
<ipython-input-31-28d4e564afad> in <module>()
----> 1 sorted(loc_names)

TypeError: unorderable types: NoneType() < str()

In [14]:
fc = "UGGP.UGGPADMIN.UGS_NGWMN_Monitoring_Locations"
field = "LocationName"
cursor = arcpy.SearchCursor(fc)
ll.table_to_pandas_dataframe(fc, field_names=['LocationName''AltLocationID'])
for row in cursor:
    print(row.getValue(field))


Out[14]:
{'AE-FW1': '502',
 'AE-HW1': '500',
 'AE-HW2': '501',
 'AG13A': '33',
 'AG13B': '34',
 'AG13C': '35',
 'AG14A': '36',
 'AG14B': '37',
 'AG14C': '38',
 'AG15': '39',
 'AG16A': '40',
 'AG16B': '41',
 'AG16C': '42',
 'B-1': '81',
 'B-1 (15)': '100',
 'B-105': '85',
 'B-107': '86',
 'B-109': '87',
 'B-15B': '93',
 'B-16': '89',
 'B-2': '82',
 'B-2 (20)': '101',
 'B-2 (30)': '102',
 'B-3': '103',
 'B-4': '104',
 'B-5': '105',
 'B-6': '106',
 'B-7': '107',
 'B-8 (30)': '109',
 'B-8 (50)': '110',
 'B-9': '111',
 'BARO1 at Leland-Harris': '9025',
 'BH121': '126',
 'BH123': '125',
 'BH125': '127',
 'BL-P1': '134',
 'BL-P2': '135',
 'Big Springs Creek above Dearden Ranch': '1',
 'CHWENT': '138',
 'CHWNAV': '139',
 'Callao C119': '136',
 'Central Tule MX': '75',
 'Clay Spring': '6',
 'Coyote Knolls MX': '46',
 'Dearden Springs': '5',
 'East Middle Ditch': '3',
 'Eskdale MX': '73',
 'Foote Reservoir Spring (combined flow)': '13',
 'Foote Reservoir flume': '10',
 'GP1': '123',
 'GP2': '124',
 'Highline Ditch': '2',
 'Kell (Beck) Spring': '7',
 'MW-2': '112',
 'Miller Spring': '12',
 'Needle Point 23a': '72',
 'P-1': '76',
 'P-3': '137',
 'P-4': '78',
 'P-5': '79',
 'P1001': '1001',
 'P1002': '1002',
 'P1003': '1003',
 'P1004': '1004',
 'P1005': '1005',
 'P1006': '1006',
 'P1007': '1007',
 'P1008': '1008',
 'P1009': '1009',
 'P1010': '1010',
 'P1011': '1011',
 'P1012': '1012',
 'P1013': '1013',
 'P1014': '1014',
 'P1015': '1015',
 'P1016': '1016',
 'P1017': '1017',
 'P1018': '1018',
 'P1019': '1019',
 'P1020': '1020',
 'P1021': '1021',
 'P1022': '1022',
 'P1023': '1023',
 'P1024': '1024',
 'P1025': '1025',
 'P1026': '1026',
 'P1027': '1027',
 'P1028': '1028',
 'P1029': '1029',
 'P1030': '1030',
 'P1031': '1031',
 'P1032': '1032',
 'P1033': '1033',
 'P1034': '1034',
 'P1035': '1035',
 'P1036': '1036',
 'P1037': '1037',
 'P1038': '1038',
 'P1039': '1039',
 'P1040': '1040',
 'P1041': '1041',
 'P1042': '1042',
 'P1043': '1043',
 'P1044': '1044',
 'P1045': '1045',
 'P1046': '1046',
 'P1047': '1047',
 'P1048': '1048',
 'P1049': '1049',
 'P1050': '1050',
 'P1051': '1051',
 'P1052': '1052',
 'P1053': '1053',
 'P1054': '1054',
 'P1055': '1055',
 'P1056': '1056',
 'P1057': '1057',
 'P1058': '1058',
 'P1059': '1059',
 'P1060': '1060',
 'P1061': '1061',
 'P1062': '1062',
 'P1063': '1063',
 'P1064': '1064',
 'P1065': '1065',
 'P1066': '1066',
 'P1067': '1067',
 'P1068': '1068',
 'P1069': '1069',
 'P1070': '1070',
 'P1071': '1071',
 'P1072': '1072',
 'P1073': '1073',
 'P1075': '1075',
 'P1076': '1076',
 'P1077': '1077',
 'P1078': '1078',
 'P1079': '1079',
 'P1080': '1080',
 'P1081': '1081',
 'P1090': '1090',
 'P1091': '1091',
 'P1092': '1092',
 'P1093': '1093',
 'P1094': '1094',
 'P1095': '1095',
 'P1096': '1096',
 'P1097': '1097',
 'P1098': '1098',
 'P2001': '2001',
 'P2002': '2002',
 'P2003': '2003',
 'P3001': '3001',
 'P3002': '3002',
 'P3003': '3003',
 'PW01A': '1',
 'PW01B': '2',
 'PW01C': '3',
 'PW02A': '4',
 'PW02B': '5',
 'PW03 Baro': '9003',
 'PW03A': '6',
 'PW03B': '7',
 'PW03Z': '8',
 'PW04A': '9',
 'PW04B': '10',
 'PW05A': '11',
 'PW05B': '12',
 'PW05C': '13',
 'PW06A': '14',
 'PW06B': '15',
 'PW06C': '16',
 'PW06D': '17',
 'PW06MX': '18',
 'PW07A': '19',
 'PW07B': '20',
 'PW07MX': '21',
 'PW08A': '22',
 'PW08B': '23',
 'PW09A': '24',
 'PW09B': '25',
 'PW10 Baro': '9027',
 'PW10A': '26',
 'PW10B': '27',
 'PW11B': '28',
 'PW11C': '29',
 'PW11D': '30',
 'PW11E': '31',
 'PW12': '32',
 'PW17A': '43',
 'PW17B': '44',
 'PW17C': '45',
 'PW18': '47',
 'PW19 Baro': '9049',
 'PW19A': '48',
 'PW19B': '49',
 'PW19C': '50',
 'PW20': '51',
 'Pivot north of Foote Reservoir': '11',
 'SG21A': '52',
 'SG21B': '53',
 'SG21C': '54',
 'SG22A': '55',
 'SG22B': '56',
 'SG23B': '57',
 'SG24A': '58',
 'SG24B': '59',
 'SG24C': '60',
 'SG25 Barometer': '9061',
 'SG25A': '61',
 'SG25B': '62',
 'SG25C': '63',
 'SG25D': '64',
 'SG26A': '65',
 'SG26B': '66',
 'SG26C': '67',
 'SG27': '68',
 'SH-1': '108',
 'Shell-Baker': '74',
 'Snake Valley North MX': '70',
 'Snake Valley South MX': '71',
 'TP-4': '88',
 'Twin Springs Baro': '9024',
 'Twin Springs MX': '69',
 'Twin Springs north': '9',
 'Twin Springs south': '8',
 'W-1': '94',
 'W-2': '130',
 'W-3': '129',
 'W-4': '96',
 'W-5': '133',
 'W-C1': '97',
 'W-C2': '98',
 'WLSMB': '132',
 'WRSMB': '131',
 'West Middle Ditch': '4'}

In [15]:
os.environ.get('USERNAME')


Out[15]:
'paulinkenbrandt'

In [16]:
os.path.splitext(well_file)[1] == '.xle'


Out[16]:
True

In [14]:
well_file = folder+'pw03z 20170309.xle'
baro_file = folder+'pw03baro 20170309.xle'
#ll.imp_one_well(well_file,baro_file,)

In [ ]:
well_table = ll.match_files_to_wellid(folder)
#query barometers to a single table

well_table.to_pickle(folder+'well_table.pickle')

In [ ]:
bpunits = []
baroid = well_table.loc[wellid,'BaroLoggerType']
stickup = well_table.loc[wellid,'Offset']
well_elev = well_table.loc[wellid,'Altitude']
be = well_table.loc[wellid,'BaroEfficiency']

Run Transducer Processing


In [13]:
well_table = ll.match_files_to_wellid(folder)
#query barometers to a single table

well_table.to_pickle(folder+'well_table.pickle')


---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
<ipython-input-13-c3f14f0caf49> in <module>()
----> 1 well_table = ll.match_files_to_wellid(folder)
      2 #query barometers to a single table
      3 
      4 well_table.to_pickle(folder+'well_table.pickle')

C:\Program Files\ArcGIS\Pro\bin\Python\envs\arcgispro-py3\lib\site-packages\loggerloader\transport.py in match_files_to_wellid(folder, station_table)
   1217             return try_match
   1218 
-> 1219     well_table['WellID'] = well_table[['Location', 'fileroot']].apply(lambda x: tryfile(x), 1)
   1220     well_table['baronum'] = well_table['WellID'].apply(lambda x: bdict.get(x), 1)
   1221     well_table['Altitude'] = well_table['WellID'].apply(lambda x: elevdict.get(x), 1)

C:\Program Files\ArcGIS\Pro\bin\Python\envs\arcgispro-py3\lib\site-packages\pandas\core\frame.py in apply(self, func, axis, broadcast, raw, reduce, args, **kwds)
   4358                         f, axis,
   4359                         reduce=reduce,
-> 4360                         ignore_failures=ignore_failures)
   4361             else:
   4362                 return self._apply_broadcast(f, axis)

C:\Program Files\ArcGIS\Pro\bin\Python\envs\arcgispro-py3\lib\site-packages\pandas\core\frame.py in _apply_standard(self, func, axis, ignore_failures, reduce)
   4454             try:
   4455                 for i, v in enumerate(series_gen):
-> 4456                     results[i] = func(v)
   4457                     keys.append(v.name)
   4458             except Exception as e:

C:\Program Files\ArcGIS\Pro\bin\Python\envs\arcgispro-py3\lib\site-packages\loggerloader\transport.py in <lambda>(x)
   1217             return try_match
   1218 
-> 1219     well_table['WellID'] = well_table[['Location', 'fileroot']].apply(lambda x: tryfile(x), 1)
   1220     well_table['baronum'] = well_table['WellID'].apply(lambda x: bdict.get(x), 1)
   1221     well_table['Altitude'] = well_table['WellID'].apply(lambda x: elevdict.get(x), 1)

C:\Program Files\ArcGIS\Pro\bin\Python\envs\arcgispro-py3\lib\site-packages\loggerloader\transport.py in tryfile(x)
   1210         nameparts = str(x[1]).split(' ')
   1211         try_match = iddict.get(loc_name_strip)
-> 1212         if try_match is None or int(str(try_match)) > 140:
   1213             file_name_strip = str(' '.join(nameparts[:-1])).lower().replace(" ", "").replace("-", "")
   1214             wl_value = iddict.get(file_name_strip)

ValueError: ("invalid literal for int() with base 10: '33.0'", 'occurred at index ag13a 20160802')

In [ ]:
maxtime = max(pd.to_datetime(well_table['Stop_time']))
mintime = min(pd.to_datetime(well_table['Start_time']))
print('Pulling Barometric Pressure data from {:} to {:}'.format(mintime, maxtime))

In [ ]:
bpunits = []
for ind in well_table.index:
    if 'baro' in str(ind) or 'baro' in str(well_table.loc[ind,'Location']):
        bpunits.append(well_table.loc[ind,'WellID'])

baro_out = {}
for baroid in bpunits:
    baro_out[baroid] = ll.get_location_data(gw_reading_table, baroid, mintime, 
                                   maxtime + datetime.timedelta(days=1))
    baro_out[baroid].to_pickle(folder+str(baroid)+'.pickle')

Bring in existing data


In [9]:
well_table =pd.read_pickle(folder+'well_table.pickle')

bpunits = []
for ind in well_table.index:
    if 'baro' in str(ind) or 'baro' in str(well_table.loc[ind,'Location']):
        bpunits.append(well_table.loc[ind,'WellID'])

baro_out = {}
for baroid in bpunits:
    baro_out[baroid] = pd.read_pickle(folder + str(baroid) + '.pickle')

In [10]:
manualwls = raw_archive_folder + '/All tape measurements.csv'
manual = pd.read_csv(manualwls, index_col="DateTime", engine="python")

In [ ]:
man_startdate = '1/1/2001' 
man_endate = '10/11/2002'
man_start_level = 10 
man_end_level =15.1

arcpy.AddMessage('Well {:} successfully imported!'.format(ind))

Upload Well Data To SDE


In [11]:
barocolumn='MEASUREDLEVEL'
dft_ln = {}
dft_st = {}

from matplotlib.backends.backend_pdf import PdfPages
pdf_pages = PdfPages(folder + '/wells.pdf')

welltest = well_table.index.values
for ind in welltest:
    # import well file
    df, man, be, drift = ll.imp_well(well_table,ind,manual,baro_out)
    
    # plot data
    y1 = df['WATERELEVATION'].values
    y2 = df['barometer'].values
    x1 = df.index.values
    x2 = df.index.values

    x4 = man.index
    y4 = man['Meas_GW_Elev']
    fig, ax1 = plt.subplots()
    ax1.scatter(x4,y4,color='purple')
    ax1.plot(x1,y1,color='blue',label='Water Level Elevation')
    ax1.set_ylabel('Water Level Elevation',color='blue')
    ax1.set_ylim(min(df['WATERELEVATION']),max(df['WATERELEVATION']))
    y_formatter = tick.ScalarFormatter(useOffset=False)
    ax1.yaxis.set_major_formatter(y_formatter)
    ax2 = ax1.twinx()
    ax2.set_ylabel('Barometric Pressure (ft)', color='red') 
    ax2.plot(x2,y2,color='red',label='Barometric pressure (ft)')
    h1, l1 = ax1.get_legend_handles_labels()
    h2, l2 = ax2.get_legend_handles_labels()
    ax1.legend(h1+h2, l1+l2, loc=3)
    plt.xlim(df.first_valid_index()-datetime.timedelta(days=3),df.last_valid_index()+datetime.timedelta(days=3))
    plt.title('Well: {:}  Drift: {:}  Baro. Eff.: {:}'.format(ind,drift,be))

    pdf_pages.savefig(fig)
    plt.close()



    
pdf_pages.close()
print("DONE!")
print("Files in "+ folder+'\\wells.pdf')


C:\Program Files\ArcGIS\Pro\bin\Python\envs\arcgispro-py3\lib\site-packages\pandas\core\indexing.py:337: SettingWithCopyWarning: 
A value is trying to be set on a copy of a slice from a DataFrame.
Try using .loc[row_indexer,col_indexer] = value instead

See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy
  self.obj[key] = _infer_fill_value(value)
C:\Program Files\ArcGIS\Pro\bin\Python\envs\arcgispro-py3\lib\site-packages\pandas\core\indexing.py:517: SettingWithCopyWarning: 
A value is trying to be set on a copy of a slice from a DataFrame.
Try using .loc[row_indexer,col_indexer] = value instead

See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy
  self.obj[item] = s
C:\Program Files\ArcGIS\Pro\bin\Python\envs\arcgispro-py3\lib\site-packages\loggerloader\transport.py:158: SettingWithCopyWarning: 
A value is trying to be set on a copy of a slice from a DataFrame.
Try using .loc[row_indexer,col_indexer] = value instead

See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy
  df['julian'] = df.index.to_julian_date()
C:\Program Files\ArcGIS\Pro\bin\Python\envs\arcgispro-py3\lib\site-packages\loggerloader\transport.py:174: SettingWithCopyWarning: 
A value is trying to be set on a copy of a slice from a DataFrame.
Try using .loc[row_indexer,col_indexer] = value instead

See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy
  df['datechange'] = df['julian'].apply(lambda x: x - df.loc[df.index[0], 'julian'], 1)
C:\Program Files\ArcGIS\Pro\bin\Python\envs\arcgispro-py3\lib\site-packages\loggerloader\transport.py:178: SettingWithCopyWarning: 
A value is trying to be set on a copy of a slice from a DataFrame.
Try using .loc[row_indexer,col_indexer] = value instead

See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy
  df['DRIFTCORRECTION'] = df['datechange'].apply(lambda x: m * x , 1)
C:\Program Files\ArcGIS\Pro\bin\Python\envs\arcgispro-py3\lib\site-packages\loggerloader\transport.py:179: SettingWithCopyWarning: 
A value is trying to be set on a copy of a slice from a DataFrame.
Try using .loc[row_indexer,col_indexer] = value instead

See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy
  df[outcolname] = df[meas] - (df['DRIFTCORRECTION'] + b)
Last reading in database for well ag13a 20160802 was on 2017-03-08 10:00:00.
        The first reading from the raw file is on 2016-05-03 11:00:00. Drift = -0.009.
Dates later than import data for this station already exist!
Last reading in database for well ag13a 20170308 was on 2017-03-08 10:00:00.
        The first reading from the raw file is on 2016-08-02 11:00:00. Drift = 0.032.
Dates later than import data for this station already exist!
Last reading in database for well ag13b 20160802 was on 2016-08-02 09:00:00.
        The first reading from the raw file is on 2016-05-03 11:00:00. Drift = -0.041.
Dates later than import data for this station already exist!
Last reading in database for well ag13c 20160802 was on 2016-08-02 08:00:00.
        The first reading from the raw file is on 2016-05-03 11:00:00. Drift = -1.075.
Dates later than import data for this station already exist!
Last reading in database for well ag14b 20170309 was on 2017-03-09 07:00:00.
        The first reading from the raw file is on 2016-08-02 12:00:00. Drift = 0.106.
Dates later than import data for this station already exist!
Last reading in database for well ag14c 20160802 was on 2017-03-09 07:00:00.
        The first reading from the raw file is on 2016-05-03 13:00:00. Drift = -0.06.
Dates later than import data for this station already exist!
Last reading in database for well ag16a 20160802 was on 2017-03-08 09:00:00.
        The first reading from the raw file is on 2016-05-03 11:00:00. Drift = -0.029.
Dates later than import data for this station already exist!
Last reading in database for well ag16a 20170308 was on 2017-03-08 09:00:00.
        The first reading from the raw file is on 2016-08-02 10:00:00. Drift = 0.03.
Dates later than import data for this station already exist!
Last reading in database for well ag16b 20160802 was on 2016-08-02 07:00:00.
        The first reading from the raw file is on 2016-05-03 10:00:00. Drift = 0.02.
Dates later than import data for this station already exist!
Last reading in database for well callao 20160804 was on 2016-08-04 06:00:00.
        The first reading from the raw file is on 2016-05-05 10:00:00. Drift = -0.03.
Dates later than import data for this station already exist!
Last reading in database for well central tule mx 20160803 was on 2017-03-09 12:00:00.
        The first reading from the raw file is on 2016-05-04 15:00:00. Drift = -0.053.
Dates later than import data for this station already exist!
C:\Program Files\ArcGIS\Pro\bin\Python\envs\arcgispro-py3\lib\site-packages\loggerloader\transport.py:171: RuntimeWarning: divide by zero encountered in double_scalars
  m = drift / (last_man['julian'] - first_man['julian'])
Last reading in database for well ckmx 20170307 was on 2016-08-03 10:00:00.
        The first reading from the raw file is on 2016-08-03 14:00:00. Drift = 0.041.
Well ckmx 20170307 successfully imported!
Last reading in database for well coyote knolls mx 20160803 was on 2016-08-03 10:00:00.
        The first reading from the raw file is on 2016-05-04 14:00:00. Drift = -0.077.
Dates later than import data for this station already exist!
Last reading in database for well ctvmx 20170309 was on 2017-03-09 12:00:00.
        The first reading from the raw file is on 2016-08-03 15:00:00. Drift = 0.065.
Dates later than import data for this station already exist!
Units in psi, converting to ft...
Dropped from beginning to 2016-09-14 09:00:00
Last reading in database for well eskdale mx 20170309 was on 2017-03-09 08:00:00.
        The first reading from the raw file is on 2016-09-14 11:00:00. Drift = 0.274.
Dates later than import data for this station already exist!
Last reading in database for well needle point blm 20160801 was on 2016-08-01 13:00:00.
        The first reading from the raw file is on 2016-05-02 16:00:00. Drift = -0.031.
Dates later than import data for this station already exist!
Last reading in database for well pw01a 20160802 was on 2017-03-08 11:00:00.
        The first reading from the raw file is on 2016-05-03 12:00:00. Drift = -0.044.
Dates later than import data for this station already exist!
Dropped from beginning to 2016-08-02 10:00:00
Last reading in database for well pw01a 20170308 was on 2017-03-08 11:00:00.
        The first reading from the raw file is on 2016-08-02 12:00:00. Drift = -0.007.
Dates later than import data for this station already exist!
Last reading in database for well pw01b 20160802 was on 2017-03-08 11:00:00.
        The first reading from the raw file is on 2016-05-03 12:00:00. Drift = -0.043.
Dates later than import data for this station already exist!
Last reading in database for well pw01b 20170308 was on 2017-03-08 11:00:00.
        The first reading from the raw file is on 2016-08-02 11:00:00. Drift = 0.043.
Dates later than import data for this station already exist!
Last reading in database for well pw02a 20160801 was on 2017-03-08 09:00:00.
        The first reading from the raw file is on 2016-05-02 17:00:00. Drift = -0.024.
Dates later than import data for this station already exist!
Last reading in database for well pw02a 20170308 was on 2017-03-08 09:00:00.
        The first reading from the raw file is on 2016-08-01 17:00:00. Drift = -0.267.
Dates later than import data for this station already exist!
Last reading in database for well pw02b 20160801 was on 2017-03-08 09:00:00.
        The first reading from the raw file is on 2016-05-02 01:00:00. Drift = 0.01.
Dates later than import data for this station already exist!
Last reading in database for well pw02b 20170308 was on 2017-03-08 09:00:00.
        The first reading from the raw file is on 2016-08-01 17:00:00. Drift = 0.067.
Dates later than import data for this station already exist!
---------------------------------------------------------------------------
FileNotFoundError                         Traceback (most recent call last)
<ipython-input-11-0b9a31aaf7f8> in <module>()
      9 for ind in welltest:
     10     # import well file
---> 11     df, man, be, drift = ll.imp_well(well_table,ind,manual,baro_out)
     12 
     13     # plot data

C:\Program Files\ArcGIS\Pro\bin\Python\envs\arcgispro-py3\lib\site-packages\loggerloader\transport.py in imp_well(well_table, ind, manual, baro_out, gw_reading_table)
    965     import arcpy
    966     # import well file
--> 967     well = new_trans_imp(full_filepath, xle=(trans_type == 'Solinst'))
    968 
    969     # remove barometric pressure

C:\Program Files\ArcGIS\Pro\bin\Python\envs\arcgispro-py3\lib\site-packages\loggerloader\transport.py in new_trans_imp(infile, xle)
    665     """
    666     if xle:
--> 667         well = new_xle_imp(infile)
    668     else:
    669         well = new_csv_imp(infile)

C:\Program Files\ArcGIS\Pro\bin\Python\envs\arcgispro-py3\lib\site-packages\loggerloader\transport.py in new_xle_imp(infile)
    539     """
    540     # open text file
--> 541     with open(infile, "rb") as f:
    542         obj = xmltodict.parse(f, xml_attribs=True, encoding="ISO-8859-1")
    543     # navigate through xml to the data

FileNotFoundError: [Errno 2] No such file or directory: 'M:/PROJECTS/Snake Valley Water/Transducer Data/Raw_data_archive/2017/pw03 baro 20160803.xle'

In [12]:
pdf_pages.savefig(fig)
plt.close()



    
pdf_pages.close()
print("DONE!")
print("Files in "+ folder+'\\wells.pdf')


DONE!
Files in M:/PROJECTS/Snake Valley Water/Transducer Data/Raw_data_archive/2017/\wells.pdf

In [ ]:
def baro_eff(df, bp, wl, lag=200):
    import statsmodels.tsa.tsatools as tools
    df.dropna(inplace=True)
    dwl = df[wl].diff().values[1:-1]
    dbp = df[bp].diff().values[1:-1]
    # dwl = df[wl].values[1:-1]
    # dbp = df[bp].values[1:-1]
    df['j_dates'] = df.index.to_julian_date()
    lag_time = df['j_dates'].diff().cumsum().values[1:-1]
    df.drop('j_dates', axis=1, inplace=True)
    # Calculate BP Response Function

    ## create lag matrix for regression
    bpmat = tools.lagmat(dbp, lag, original='in')
    ## transpose matrix to determine required length
    ## run least squared regression
    sqrd = np.linalg.lstsq(bpmat, dwl)
    wlls = sqrd[0]
    cumls = np.cumsum(wlls)
    negcumls = [-1 * cumls[i] for i in range(len(cumls))]
    ymod = np.dot(bpmat, wlls)

    ## resid gives the residual of the bp
    resid = [(dwl[i] - ymod[i]) for i in range(len(dwl))]
    lag_trim = lag_time[0:len(cumls)]
    return negcumls, cumls, ymod, resid, lag_time, dwl, dbp, wlls

In [ ]:
baro_eff(df,'barometer','Level')

In [ ]:
# plot data
wl = 'cor2'
y1 = df[wl].values
y2 = df['barometer'].values
x1 = df.index.values
x2 = df.index.values

x4 = man.index
y4 = man['Meas_GW_Elev']
fig, ax1 = plt.subplots()
ax1.scatter(x4,y4,color='purple')
ax1.plot(x1,y1,color='blue',label='Water Level Elevation')
ax1.set_ylabel('Water Level Elevation',color='blue')
ax1.set_ylim(min(df[wl]),max(df[wl]))
y_formatter = tick.ScalarFormatter(useOffset=False)
ax1.yaxis.set_major_formatter(y_formatter)
ax2 = ax1.twinx()
ax2.set_ylabel('Barometric Pressure (ft)', color='red') 
ax2.plot(x2,y2,color='red',label='Barometric pressure (ft)')
h1, l1 = ax1.get_legend_handles_labels()
h2, l2 = ax2.get_legend_handles_labels()
ax1.legend(h1+h2, l1+l2, loc=3)
plt.xlim(df.first_valid_index()-datetime.timedelta(days=3),df.last_valid_index()+datetime.timedelta(days=3))
plt.title('Well: {:}  Drift: {:}  Baro. Eff.: {:}'.format(ind,drift,be))

In [ ]:
df['cor2'] = df[['Level', 'barometer']].\
        apply(lambda x: x[0] + 0.25 * (x[1]), 1)

In [ ]:
df['barometer'][0]

In [ ]:
plt.scatter(df['Level'].diff(), df['barometer'].diff())
import statsmodels.api as sm
df['dbp'] = df['barometer'].diff()
df['dwl'] = df['corrwl'].diff()
df1 = df.dropna(subset=['dbp','dwl'])
x = df1['dbp']
y = df1['dwl']
X = sm.add_constant(x)
model = sm.OLS(y, X).fit()
    # y_reg = [data.ix[i,'Sbp']*m+b for i in range(len(data['Sbp']))]
b = model.params[0]
m = model.params[1]
r = model.rsquared
print(m,r)

In [ ]:
be, intc, r = ll.clarks(df[500:600],'barometer','corrwl')
print(be,intc,r)

Barometric Pressure


In [ ]:
#baro_twin = lgld.compilation("H:/SnakeValley/Wetlands/Baro/1044788*")
baro_leland = lgld.compilation("H:/SnakeValley/Wetlands/Baro/*1044779*")

In [ ]:
baro_twin = lgld.compilation("H:/SnakeValley/Wetlands/Baro/*1044788*")

In [ ]:
xlefiles = xle_head_table("H:/SnakeValley/Wetlands/Baro/")

In [ ]:
#xlefiles.index = xlefiles.index.droplevel(level=1)
xlefiles.index.name = 'filename'

In [ ]:
pw03baro_append = folder + "\\pw03 baro 2016-08-03.xle"
pw10baro_append = folder + "\\pw10 baro 2016-08-03.xle"
pw19baro_append = folder + "\\pw19 baro 2016-08-04.xle"

In [ ]:
pw03baro = raw_archive_folder + "\\pw03baro.csv"
pw10baro = raw_archive_folder + "\\pw10baro.csv"
pw19baro = raw_archive_folder +  "\\pw19baro.csv"

In [ ]:
wa.appendomatic(pw03baro_append,pw03baro)
wa.appendomatic(pw10baro_append,pw10baro)
wa.appendomatic(pw19baro_append,pw19baro)

In [ ]:
# duplicated to update changes made by appendomatic
pw03baro = pd.read_csv(raw_archive_folder + "\\pw03baro.csv",index_col='DateTime',parse_dates=True)
pw10baro = pd.read_csv(raw_archive_folder + "\\pw10baro.csv",index_col='DateTime',parse_dates=True)
pw19baro = pd.read_csv(raw_archive_folder + "\\pw19baro.csv",index_col='DateTime',parse_dates=True)

In [ ]:
pw03baro = lgld.hourly_resample(pw03baro)
pw10baro = lgld.hourly_resample(pw10baro)
pw19baro = lgld.hourly_resample(pw19baro)

In [ ]:
baro = pd.merge(pw03baro,pw10baro,how="outer",left_index=True,right_index=True)
baro = pd.merge(baro,pw19baro,how="outer",left_index=True,right_index=True)
baro.dropna(axis=0,inplace=True)
baro['integr'] = 0 #for vented transducers

In [ ]:
baro[['pw03','pw10','pw19']].plot()

In [ ]:
pw03baro['Temperature'].plot()

In [ ]:


In [ ]:


In [ ]:


In [ ]:
barodict = {"PW10 Barometer":9027, "PW19 Barometer":9049, "SG25 Barometer":9061, 
            "Leland-Harris Barometer":9025, "Twin Springs Barometer":9024, "PW03 Barometer":9003}
bpdict = {'pw03':'9003','pw10':'9027','pw19':'9049','twin':'9024','leland':'9025'}

In [ ]:


In [ ]:
baro.to_csv(raw_archive_folder + '\\baro.csv')

In [ ]:
'ReadingID',
 'WellID',
 'DateTime',
 'MeasuredLevel',
 'Temp',

Water Level Tranducer Data

Export and Plot Data

Export Manual Data


In [ ]:
manualwls = raw_archive_folder + "\\All tape measurements.csv"
manual = pd.read_csv(manualwls, index_col="DateTime", engine="python")
manualrecent = manual[manual.index.to_datetime() > pd.datetime(2015,6,1)]
manualrecent.dropna(inplace=True)
manualrecent.reset_index(inplace=True)
print manualrecent.dtypes
manualrecent = pd.merge(manualrecent, wellinfo, how='left',left_on='WellID', right_index=True)
manualrecent.loc[:,"MeasuredLevel"] = np.nan
manualrecent.loc[:,"Temp"] = np.nan
manualrecent.loc[:,"BaroEfficiencyCorrected"] = np.nan 
manualrecent.loc[:,"DeltaLevel"] = np.nan
manualrecent.loc[:,"DriftCorrection"] = np.nan
manualrecent.loc[:,"MeasuredBy"] = np.nan
manualrecent.loc[:,"Tape"] = 1
manualrecent.loc[:,"DTWBelowGroundSurface"] = np.nan
manualrecent.loc[:,"WaterElevation"] = np.nan
#manualrecent["DTWBelowGroundSurface"] = np.nan
manualrecent.loc[:,"DTWBelowCasing"] = manualrecent.loc[:,"MeasuredDTW"]
manualrecent.loc[:,"DTWBelowGroundSurface"] = manualrecent.loc[:,"MeasuredDTW"] - manualrecent.loc[:,"Offset"]
manualrecent.loc[:,"WaterElevation"] = manualrecent.loc[:,'GroundElevation'] - manualrecent.loc[:,"DTWBelowGroundSurface"]
print manualrecent

#outpath = pathlist[0] + '\\' + pathlist[1] + '\\' + pathlist[2] + '\\' + pathlist[3] + '\\' + pathlist[4] + '\\' + 'Manual' + '.csv'  
manualrecent.to_csv(raw_archive_folder+ 'Manual' + '.csv', index=True, columns= ["WellID","DateTime","MeasuredLevel","Temp","BaroEfficiencyCorrected","DeltaLevel",
                                             "MeasuredDTW","DriftCorrection","DTWBelowCasing","DTWBelowGroundSurface",
                                             "WaterElevation","Tape","MeasuredBy"])

In [ ]:
manual['DateTime'] = manual.index.to_datetime()
manual.to_csv(raw_archive_folder+ 'Manual' + '.csv', index=False, columns= ["WellID","DateTime","MeasuredLevel","Temp","BaroEfficiencyCorrected","DeltaLevel",
                                             "MeasuredDTW","DriftCorrection","DTWBelowCasing","DTWBelowGroundSurface",
                                             "WaterElevation","Tape","MeasuredBy"])

List Files to Export


In [ ]:
print wellinfo.loc[:,'full_file_name']

Files To Database


In [ ]:
manualwls = raw_archive_folder + '/All tape measurements.csv'
manual = pd.read_csv(manualwls, index_col="DateTime", engine="python")
barofile = raw_archive_folder + '/baro.csv'
baro = pd.read_csv(barofile,index_col=0, parse_dates=True)
wellinfo = pd.read_csv(folder + '/wellinfo4.csv')

In [ ]:
engine = engineGetter.getEngine()

Add Select Wells


In [ ]:
form = widgets.VBox()
welllist = ["pw07mx 2015-11-30.csv","pw02a 11-30-2015.xle","pw02a 2016-07-07.xle","sg23b 2016-05-02.xle"]
welllist = welllist+ list(wellinfo.loc[:,'full_file_name'].values)
#print welllist

wells = widgets.SelectMultiple(description="Well", options=welllist, padding=4)
pdfName = widgets.Text(description="PDF:",padding=4)
driftTol = widgets.FloatSlider(value=0.05, min=0.00, max=10.0, step=0.05, description='Drift Tolerance:')
form.children = [wells, pdfName, driftTol]
display(form)

In [ ]:
for i in wells.value:
    print folder+'/'+i
    inputfile = folder +'/'+i
    g, drift, wellname = svdi.imp_new_well(inputfile, wellinfo, manual, baro)
    glist = g.columns.tolist()
    y1 = g['WaterElevation'].values
    y2 = baro['pw03'].values
    x1 = g.index.values
    x2 = baro.index.values
    wellname, wellid = svdi.getwellid(folder+'\\'+i,wellinfo)
    ylast = wellinfo[wellinfo['WellID']==wellid]['GroundElevation'].values[0] + wellinfo[wellinfo['WellID']==wellid]['Offset'].values[0] - svdi.fcl(manual[manual['WellID']== wellid],max(g.index.to_datetime()))[1]
    yfirst = wellinfo[wellinfo['WellID']==wellid]['GroundElevation'].values[0] + wellinfo[wellinfo['WellID']==wellid]['Offset'].values[0] - svdi.fcl(manual[manual['WellID']== wellid],min(g.index.to_datetime()))[1]
    xlast = pd.to_datetime((svdi.fcl(manual[manual['WellID']== wellid],max(pd.to_datetime(g.index)))).name)
    xfirst = pd.to_datetime((svdi.fcl(manual[manual['WellID']== wellid],min(pd.to_datetime(g.index)))).name)
    x4 = [xfirst,xlast]
    y4 = [yfirst,ylast]
    fig, ax1 = plt.subplots()
    ax1.scatter(x4,y4,color='purple')
    ax1.plot(x1,y1,color='blue',label='Water Level Elevation')
    ax1.set_ylabel('Water Level Elevation',color='blue')
    y_formatter = tick.ScalarFormatter(useOffset=False)
    ax1.yaxis.set_major_formatter(y_formatter)
    ax2 = ax1.twinx()
    ax2.set_ylabel('Barometric Pressure (ft)', color='red') 
    ax2.plot(x2,y2,color='red',label='Barometric pressure (ft)')
    h1, l1 = ax1.get_legend_handles_labels()
    h2, l2 = ax2.get_legend_handles_labels()
    ax1.legend(h1+h2, l1+l2, loc=3)
    plt.xlim(xfirst-timedelta(days=3),xlast+timedelta(days=3))
    plt.title('Well: ' + wellname.title() + '  ' + 'Total Drift = ' + str(g['DriftCorrection'][-1]))

In [ ]:
env.workspace = "C:/Users/PAULINKENBRANDT/AppData/Roaming/ESRI/Desktop10.4/ArcCatalog/UEMP_Dev.sde"
read_table = "UEMP_Dev.UEMPADMIN.GW_reading"

arcpy.env.overwriteOutput=True
edit = arcpy.da.Editor(env.workspace)
edit.startEditing(False, True)
edit.startOperation()

In [ ]:
pdf_pages = PdfPages(folder + '/' + pdfName.value + '.pdf')
for i in wells.value:
    print folder+'/'+i
    inputfile = folder +'/'+i
    g, drift, wellname = svdi.imp_new_well(inputfile, wellinfo, manual, baro)
    quer = "SELECT * FROM groundwater.reading where WellID = " + str(g['WellID'].values[0]) + " and DateTime > \'" + str(g.index.values[-1])[0:10] + " "+ str(g.index.values[-1])[11:19] + "\'"

    if abs(float(drift)) < driftTol.value:
        if len(pd.read_sql_query(sql=quer,con=engine))<1:
            g.to_csv(enteredFolder+wellname+".csv", index=False)
            tablename = 'reading'
            g.to_sql(con=engine, name = tablename, if_exists='append', flavor='mysql', index=False)
            print("Added to DB table " + tablename)
        else:
            print("Already Entered")
            print(len(pd.read_sql_query(sql=quer,con=engine)))
    else:
        g.to_csv(checkFolder+wellname+".csv", index=False, columns= ["WellID","DateTime","MeasuredLevel","Temp","BaroEfficiencyCorrected","DeltaLevel",
                                             "MeasuredDTW","DriftCorrection","DTWBelowCasing","DTWBelowGroundSurface",
                                             "WaterElevation","Tape","MeasuredBy"])
        print("Check File")
    glist = g.columns.tolist()
    for j in range(len(glist)):
        if 'pw' in glist[j]:
            h = glist[j]
    y1 = g['WaterElevation'].values
    y2 = baro['pw03'].values
    x1 = g.index.values
    x2 = baro.index.values
    wellname, wellid = svdi.getwellid(folder+'\\'+i,wellinfo)
    ylast = wellinfo[wellinfo['WellID']==wellid]['GroundElevation'].values[0] + wellinfo[wellinfo['WellID']==wellid]['Offset'].values[0] - svdi.fcl(manual[manual['WellID']== wellid],max(pd.to_datetime(g.index)))[1]
    yfirst = wellinfo[wellinfo['WellID']==wellid]['GroundElevation'].values[0] + wellinfo[wellinfo['WellID']==wellid]['Offset'].values[0] - svdi.fcl(manual[manual['WellID']== wellid],min(pd.to_datetime(g.index)))[1]
    xlast = pd.to_datetime(svdi.fcl(manual[manual['WellID']== wellid],max(pd.to_datetime(g.index))).name)
    xfirst = pd.to_datetime(svdi.fcl(manual[manual['WellID']== wellid],min(pd.to_datetime(g.index))).name)
    x4 = [xfirst,xlast]
    y4 = [yfirst,ylast]
    fig, ax1 = plt.subplots()
    ax1.scatter(x4,y4,color='purple')
    ax1.plot(x1,y1,color='blue',label='Water Level Elevation')
    ax1.set_ylabel('Water Level Elevation',color='blue')
    y_formatter = tick.ScalarFormatter(useOffset=False)
    ax1.yaxis.set_major_formatter(y_formatter)
    ax2 = ax1.twinx()
    ax2.set_ylabel('Barometric Pressure (ft)', color='red') 
    ax2.plot(x2,y2,color='red',label='Barometric pressure (ft)')
    h1, l1 = ax1.get_legend_handles_labels()
    h2, l2 = ax2.get_legend_handles_labels()
    ax1.legend(h1+h2, l1+l2, loc=3)
    plt.xlim(xfirst-timedelta(days=3),xlast+timedelta(days=3))
    plt.title('Well: ' + wellname.title() + '  ' + 'Total Drift = ' + str(g['DriftCorrection'][-1]))
    
    pdf_pages.savefig(fig)
    plt.close()
pdf_pages.close()
print("DONE!")
print("Files in "+ folder+'\\wells.pdf')
print(datetime.now())

In [ ]:
form = widgets.VBox()
driftTol = widgets.FloatSlider(value=0.05, min=0.00, max=10.0, step=0.05, description='Drift Tolerance:')
form.children = [driftTol]
display(form)

In [ ]:
pdf_pages = PdfPages(folder + '/wells.pdf')
for i in wellinfo.loc[:,'full_file_name']:
    print folder+'/'+i
    inputfile = folder +'/'+i
    g, drift, wellname = svdi.imp_new_well(inputfile, wellinfo, manual, baro)
    quer = "SELECT * FROM groundwater.reading where WellID = " + str(g['WellID'].values[0]) + " and (DateTime >= \'" + str(g.index.values[-1])[0:10] + "\')"

    if abs(float(drift)) < driftTol.value:
        if len(pd.read_sql_query(sql=quer,con=engine))<1:
            g.to_csv(enteredFolder + wellname+".csv", index=False)
            g.to_sql(con=engine, name='reading', if_exists='append', flavor='mysql', index=False)
            print("Added to DB")
        else:
            print("Already Entered")
    else:
        g.to_csv(checkFolder + wellname+".csv", index=False, columns= ["WellID","DateTime","MeasuredLevel","Temp","BaroEfficiencyCorrected","DeltaLevel",
                                             "MeasuredDTW","DriftCorrection","DTWBelowCasing","DTWBelowGroundSurface",
                                             "WaterElevation","Tape","MeasuredBy"])
        print("Check File")
    glist = g.columns.tolist()
    for j in range(len(glist)):
        if 'pw' in glist[j]:
            h = glist[j]
    y1 = g['WaterElevation'].values
    y2 = baro['pw03'].values
    x1 = g.index.values
    x2 = baro.index.values
    wellname, wellid = svdi.getwellid(folder+'\\'+i,wellinfo)
    ylast = wellinfo[wellinfo['WellID']==wellid]['GroundElevation'].values[0] + wellinfo[wellinfo['WellID']==wellid]['Offset'].values[0] - svdi.fcl(manual[manual['WellID']== wellid],max(pd.to_datetime(g.index)))[1]
    yfirst = wellinfo[wellinfo['WellID']==wellid]['GroundElevation'].values[0] + wellinfo[wellinfo['WellID']==wellid]['Offset'].values[0] - svdi.fcl(manual[manual['WellID']== wellid],min(pd.to_datetime(g.index)))[1]
    xlast = pd.to_datetime(svdi.fcl(manual[manual['WellID']== wellid],max(pd.to_datetime(g.index))).name)
    xfirst = pd.to_datetime(svdi.fcl(manual[manual['WellID']== wellid],min(pd.to_datetime(g.index))).name)
    x4 = [xfirst,xlast]
    y4 = [yfirst,ylast]
    fig, ax1 = plt.subplots()
    ax1.scatter(x4,y4,color='purple')
    ax1.plot(x1,y1,color='blue',label='Water Level Elevation')
    ax1.set_ylabel('Water Level Elevation',color='blue')
    y_formatter = tick.ScalarFormatter(useOffset=False)
    ax1.yaxis.set_major_formatter(y_formatter)
    ax2 = ax1.twinx()
    ax2.set_ylabel('Barometric Pressure (ft)', color='red') 
    ax2.plot(x2,y2,color='red',label='Barometric pressure (ft)')
    h1, l1 = ax1.get_legend_handles_labels()
    h2, l2 = ax2.get_legend_handles_labels()
    ax1.legend(h1+h2, l1+l2, loc=3)
    plt.xlim(min(pd.to_datetime(g.index))-timedelta(days=3),max(pd.to_datetime(g.index))+timedelta(days=3))
    plt.title('Well: ' + wellname.title() + '  ' + 'Total Drift = ' + str(g['DriftCorrection'][-1]))
    
    pdf_pages.savefig(fig)
    plt.close()
pdf_pages.close()
print("DONE!")
print("Files in "+ folder+'\\wells.pdf')