In [1]:
%matplotlib inline
import os
import sys
import platform
import matplotlib

import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.dates as dates
import matplotlib.ticker as tick
from matplotlib.backends.backend_pdf import PdfPages
import datetime
from pylab import rcParams
rcParams['figure.figsize'] = 15, 10

In [2]:
import loggerloader as ll
import wellapplication as wa
import arcpy

In [3]:
print("Operating System " + platform.system() + " " + platform.release())
print("Python Version " + str(sys.version))
print("Pandas Version " + str(pd.__version__))
print("Numpy Version " + str(np.__version__))
print("Matplotlib Version " + str(matplotlib.__version__))
print("Loggerloader Version {:}".format(ll.__version__))


Operating System Windows 7
Python Version 3.5.3 |Continuum Analytics, Inc.| (default, May 15 2017, 10:43:23) [MSC v.1900 64 bit (AMD64)]
Pandas Version 0.19.2
Numpy Version 1.11.2
Matplotlib Version 1.5.3
Loggerloader Version 0.2.11

In [4]:
drive = 'M:'
raw_archive_folder = drive + '/PROJECTS/Snake Valley Water/Transducer Data/Raw_data_archive'
folder = raw_archive_folder + '/2017/'
enteredFolder = folder + '/entered/'
checkFolder = folder + '/toCheck/'
#wellinfofile = drive + raw_archive_folder + '/well table 2015-03-23.csv'

In [5]:
if not os.path.exists(enteredFolder):
    print('Creating Output Folder')
    os.makedirs(enteredFolder)
else:
    print('Output Folder Exists')


Output Folder Exists

In [6]:
if not os.path.exists(checkFolder):
    print('Creating Check Folder')
    os.makedirs(checkFolder)
else:
    print('Check Folder Exists')


Check Folder Exists

Inputs for connection file and tables


In [48]:
conn_file_root = "C:/Users/PAULINKENBRANDT/AppData/Roaming/ESRI/Desktop10.4/ArcCatalog/"
conn_file = "UGS_SDE.sde" #production
arcpy.env.workspace = conn_file_root + conn_file
gw_reading_table = "UGGP.UGGPADMIN.UGS_GW_reading"
station_table =  "UGGP.UGGPADMIN.UGS_NGWMN_Monitoring_Locations"

Snake Valley Data Import

read over raw files -> append baro to db -> import well xle -> pull bp data from db -> remove bp press -> import manual measurements -> fix drift -> remove stickup -> include elevation

Import Barometric Data into Database


In [9]:
loc_table = "UGGP.UGGPADMIN.UGS_NGWMN_Monitoring_Locations"

# create empty dataframe to house well data
field_names = ['LocationID', 'LocationName', 'LocationType', 'LocationDesc', 'AltLocationID', 'Altitude',
               'AltitudeUnits', 'WellDepth', 'SiteID', 'Offset', 'LoggerType', 'BaroEfficiency',
               'BaroEfficiencyStart', 'BaroLoggerType']
df = pd.DataFrame(columns=field_names)
# populate dataframe with data from SDE well table
search_cursor = arcpy.da.SearchCursor(loc_table, field_names)
for row in search_cursor:
    # combine the field names and row items together, and append them
    df = df.append(dict(zip(field_names, row)), ignore_index=True)
df.dropna(subset=['AltLocationID'],inplace=True)
df


Out[9]:
LocationID LocationName LocationType LocationDesc AltLocationID Altitude AltitudeUnits WellDepth SiteID Offset LoggerType BaroEfficiency BaroEfficiencyStart BaroLoggerType
0 393453113515501 P1001 Well Miller Spring Piezometer 3 1001 4760.66 None 3.6 40.0 None None 0.00 None 9061
1 393452113515501 P1002 Well Miller Spring Piezometer 2 1002 4761.89 None 4 40.0 None None 0.00 None 9061
2 393454113515501 P1003 Well Miller Spring Piezometer 4 1003 4760.67 None 3.8 40.0 None None 0.00 None 9061
3 393450113515501 P1004 Well Miller Spring Piezometer 1 1004 4761.41 None 3.8 40.0 None None 0.00 None 9061
4 393501113515601 P1005 Well Miller Spring Piezometer 5 1005 4760.68 None 4 40.0 None None 0.00 None 9061
5 393500113515601 P1006 Well Miller Spring Piezometer 6 1006 4759.73 None 3.8 40.0 None None 0.00 None 9061
6 393501113515701 P1007 Well Miller Spring Piezometer 7 1007 4759.88 None 3.8 40.0 None None 0.00 None 9061
7 393506113515701 P1008 Well Miller Spring Piezometer 9 1008 4759.11 None 3.9 40.0 None None 0.00 None 9061
8 393506113515601 P1009 Well Miller Spring Piezometer 10 1009 4759.00 None 4 40.0 None None 0.00 None 9061
9 393506113515801 P1010 Well Miller Spring Piezometer 8 1010 4759.58 None 3.9 40.0 None None 0.00 None 9061
10 393329113532901 P1011 Well Leland-Harris Piezometer 2 1011 4781.28 None 4.1 59.0 None None 0.00 None 9061
11 393330113532901 P1012 Well Leland-Harris Piezometer 1 1012 4781.63 None 3.8 59.0 None None 0.00 None 9061
12 393326113533101 P1013 Well Leland-Harris Piezometer 4 1013 4781.31 None 4.1 59.0 None None 0.00 None 9061
13 393328113533101 P1014 Well Leland-Harris Piezometer 3 1014 4779.06 None 3.7 59.0 None None 0.00 None 9061
14 393328113531701 P1015 Well Leland-Harris Piezometer 5 1015 4780.00 None 4.1 60.0 None None 0.00 None 9061
15 393328113531601 P1016 Well Leland-Harris Piezometer 6 1016 4777.50 None 2.7 60.0 None None 0.00 None 9061
16 393331113531801 P1017 Well Leland-Harris Piezometer 7 1017 4778.59 None 3.9 60.0 None None 0.00 None 9061
17 393333113531801 P1018 Well Leland-Harris Piezometer 8 1018 4778.60 None 4.2 60.0 None None 0.00 None 9061
18 393335113531901 P1019 Well Leland-Harris Piezometer 9 1019 4778.36 None 3.9 60.0 None None 0.00 None 9061
19 393337113531901 P1020 Well Leland-Harris Piezometer 10 1020 4780.08 None 4.2 60.0 None None 0.00 None 9061
20 392451113523201 P1021 Well Foote Reservoir Piezometer 1 1021 4814.01 None 3.8 -1.0 None None 0.00 None 9061
21 392450113523201 P1022 Well Foote Reservoir Piezometer 2 1022 4812.49 None 3.9 -1.0 None None 0.00 None 9061
22 392448113523401 P1023 Well Foote Reservoir Piezometer 3 1023 4812.54 None 3.7 -1.0 None None 0.00 None 9061
23 392449113524601 P1024 Well Foote Reservoir Piezometer 4 1024 4808.56 None 3.9 -1.0 None None 0.00 None 9061
24 392449113524801 P1025 Well Foote Reservoir Piezometer 5 1025 4809.78 None 4 -1.0 None None 0.00 None 9061
25 392449113524901 P1026 Well Foote Reservoir Piezometer 6 1026 4807.63 None 3.9 -1.0 None None 0.00 None 9061
26 392907113545801 P1027 Well Gandy Salt Marsh Piezometer 1 1027 4794.18 None 4.1 53.0 None None 0.00 None 9061
27 392907113545901 P1028 Well Gandy Salt Marsh Piezometer 2 1028 4792.67 None 4.3 53.0 None None 0.00 None 9061
28 392906113545801 P1029 Well Gandy Salt Marsh Piezometer 3 1029 4793.25 None 4.2 53.0 None None 0.00 None 9061
29 392904113545501 P1030 Well Gandy Salt Marsh Piezometer 4 1030 4791.79 None 3.7 53.0 None None 0.00 None 9061
... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
336 405512111193801 WRSMB Well None 131 5835.00 None None 49.0 -0.14 None 0.00 None None
337 405511111193901 WLSMB Well None 132 5827.00 None None 49.0 -0.38 None 0.00 None None
338 405511111193701 W-5 Well None 133 5852.00 None None 49.0 -0.13 None 0.00 None None
339 404745111570001 AE-HW1 Well None 500 4211.00 None None 102.0 None None 0.00 None None
340 404744111570001 AE-HW2 Well None 501 4211.00 None None 102.0 None None 0.00 None None
341 404744111570101 AE-FW1 Well None 502 4213.00 None None 102.0 None None 0.00 None None
342 384702114041601 Snake Valley South MX Well None 71 5581.90 None None 30.0 2.15 None 0.00 None 9003
343 390812114033901 Snake Valley North MX Well None 70 5096.10 None None 29.0 2.13 Global Water until 6/8/10, then Solinst 0.00 None 9003
344 404938110055001 None Well None 503 10469.00 None None 103.0 None None 0.00 None None
345 404938110054901 None Well None 504 10465.00 None None 103.0 None None 0.00 None None
346 404938110055002 None Well None 505 10469.00 None None 103.0 None None 0.00 None None
347 404938110054902 None Well None 506 10465.00 None None 103.0 None None 0.00 None None
348 404938110054903 None Well None 507 10465.00 None None 103.0 None None 0.00 None None
349 404943110055003 None Well None 508 10334.00 None None 103.0 None None 0.00 None None
350 384620114024901 Big Springs Creek above Dearden Ranch Spring at Burbank UT 1 5438.00 None None 36.0 None None 0.00 None None
351 384636114024101 Highline Ditch Spring at Burbank UT 2 5426.00 None None 36.0 None None 0.00 None None
352 384638114023401 East Middle Ditch Spring at Burbank UT 3 5418.00 None None 36.0 None None 0.00 None None
353 384641114023402 West Middle Ditch Spring at Burbank UT 4 5420.00 None None 36.0 None None 0.00 None None
354 384631114024501 Dearden Springs Spring at Burbank UT 5 5424.00 None None 36.0 None None 0.20 None None
355 385156113593701 Clay Spring Spring 5.5 miles SSE of Garrison UT 6 5440.00 None None 37.0 None None 0.00 None None
356 391814114003001 Kell (Beck) Spring Spring 10 miles south of Gandy UT 7 4939.00 None None 39.0 None None 0.20 None 9003
357 392411113514601 Twin Springs south Spring 7.4 miles ESE of Gandy UT 8 4819.00 None None 73.0 None None 0.00 None 9061
358 392413113515001 Twin Springs north Spring 7.4 miles ESE of Gandy UT 9 4819.00 None None 73.0 None None 0.33 None 9061
359 392456113521301 Foote Reservoir flume Spring 6.7 miles ESE of Gandy UT 10 4827.00 None None 38.0 None None 0.33 None None
360 392548113531701 Pivot north of Foote Reservoir Spring 5.5 miles ESE of Gandy UT 11 4824.00 None None 38.0 None None 0.10 None None
361 393448113515401 Miller Spring Spring 4.5 miles SE of Partoun UT 12 4771.00 None None 40.0 None None 0.25 None None
362 392459113521302 Foote Reservoir Spring (combined flow) Spring 6.7 miles ESE of Gandy UT 13 4833.00 None None 38.0 None None 0.30 None None
363 391926113391803 PW10 Baro Barometer Barometer at PW10 9027 5266.30 None None -1.0 None Solinst 0.00 None None
364 393803113161602 PW19 Baro Barometer PW19 Barometer 9049 4633.00 None None -1.0 None None 0.00 None None
365 393331113533004 SG25 Barometer Barometer SG25 Barometer 9061 4784.00 None None -1.0 None None 0.00 None None

249 rows × 14 columns


In [ ]:
xles = self.xle_head_table(self.xledir + '/')
arcpy.AddMessage('xles examined')
csvs = self.csv_info_table(self.xledir + '/')
arcpy.AddMessage('csvs examined')
file_info_table = pd.concat([xles, csvs[0]])

In [ ]:
def csv_info_table(self, folder):
    csv = {}
    files = [f for f in os.listdir(folder) if os.path.isfile(os.path.join(folder, f))]
    field_names = ['filename', 'Start_time', 'Stop_time']
    df = pd.DataFrame(columns=field_names)
    for file in files:
        fileparts = os.path.basename(file).split('.')
        filetype = fileparts[1]
        basename = fileparts[0]
        if filetype == 'csv':
            try:
                cfile = {}
                csv[basename] = self.new_csv_imp(os.path.join(folder, file))
                cfile['Battery_level'] = int(round(csv[basename].loc[csv[basename]. \
                                                   index[-1], 'Volts'] / csv[basename]. \
                                                   loc[csv[basename].index[0], 'Volts'] * 100, 0))
                cfile['Sample_rate'] = (csv[basename].index[1] - csv[basename].index[0]).seconds * 100
                cfile['filename'] = basename
                cfile['fileroot'] = basename
                cfile['full_filepath'] = os.path.join(folder, file)
                cfile['Start_time'] = csv[basename].first_valid_index()
                cfile['Stop_time'] = csv[basename].last_valid_index()
                cfile['Location'] = ' '.join(basename.split(' ')[:-1])
                cfile['trans type'] = 'Global Water'
                df = df.append(cfile, ignore_index=True)
            except:
                pass
    df.set_index('filename', inplace=True)
    return df, csv

In [33]:
files = ['pw20 20170307.xle','pw03z 20170309.xle','pw01c 20170308.csv']
file_extension = []
for file in files:
    file_extension.append(os.path.splitext(file)[1])
'.xle' in file_extension and '.csv' in file_extension


Out[33]:
True

In [ ]:
files = ['pw20 20170307.xle','pw03z 20170309.xle','pw01c 20170308.csv']
    if 'xle' in file_extension:
            xles = self.xle_head_table(dirpath)
            arcpy.AddMessage('xles examined')
            file_info_table = xles
        if 'csv' in file_extension:
            csvs = self.csv_info_table(dirpath)
            arcpy.AddMessage('csvs examined')
            file_info_table = csvs

In [58]:
file_info_table = pd.read_csv(r'M:\PROJECTS\Snake Valley Water\Transducer Data\Raw_data_archive\2017\file_info_table.csv')
maxtime = max(pd.to_datetime(file_info_table['Stop_time']))
mintime = min(pd.to_datetime(file_info_table['Start_time']))
#arcpy.AddMessage("Data span from {:} to {:}.".format(mintime,maxtime))

# upload barometric pressure data
baro_out = {}
#baros = well_table[well_table['LocationType'] == 'Barometer']
baros = []
if len(baros) < 1:
    baro_out['9003'] = get_location_data('9003', mintime, maxtime + datetime.timedelta(days=1))


['READINGDATE', 'MEASUREDLEVEL', 'TEMP', 'BAROEFFICIENCYLEVEL', 'DELTALEVEL', 'MEASUREDDTW', 'DRIFTCORRECTION', 'DTWBELOWCASING', 'DTWBELOWGROUNDSURFACE', 'WATERELEVATION', 'TAPE', 'MEASUREDBY', 'LOCATIONID']
             READINGDATE  MEASUREDLEVEL     TEMP BAROEFFICIENCYLEVEL  \
0    2015-12-02 00:00:00            NaN      NaN                None   
1    2015-12-02 01:00:00            NaN      NaN                None   
2    2015-12-02 02:00:00            NaN      NaN                None   
3    2015-12-02 03:00:00            NaN      NaN                None   
4    2015-12-02 04:00:00            NaN      NaN                None   
5    2015-12-02 05:00:00            NaN      NaN                None   
6    2015-12-02 06:00:00            NaN      NaN                None   
7    2015-12-02 07:00:00            NaN      NaN                None   
8    2015-12-02 08:00:00            NaN      NaN                None   
9    2015-12-02 09:00:00            NaN      NaN                None   
10   2015-12-02 10:00:00            NaN      NaN                None   
11   2015-12-02 11:00:00            NaN      NaN                None   
12   2015-12-02 12:00:00            NaN      NaN                None   
13   2015-12-02 13:00:00            NaN      NaN                None   
14   2015-12-02 14:00:00            NaN      NaN                None   
15   2015-12-02 15:00:00            NaN      NaN                None   
16   2015-12-02 16:00:00            NaN      NaN                None   
17   2015-12-02 17:00:00            NaN      NaN                None   
18   2015-12-02 18:00:00         2.9280   3.3304                None   
19   2015-12-02 19:00:00         2.9285   0.5065                None   
20   2015-12-02 20:00:00         2.9330  -1.5090                None   
21   2015-12-02 21:00:00         2.9369  -2.8679                None   
22   2015-12-02 22:00:00         2.9429  -3.6669                None   
23   2015-12-02 23:00:00         2.9455  -4.3556                None   
24   2015-12-03 00:00:00         2.9364  -5.0680                None   
25   2015-12-03 01:00:00         2.9224  -5.5539                None   
26   2015-12-03 02:00:00         2.9147  -5.9264                None   
27   2015-12-03 03:00:00         2.9051  -6.1721                None   
28   2015-12-03 04:00:00         2.8908  -5.9852                None   
29   2015-12-03 05:00:00         2.8801  -5.2015                None   
...                  ...            ...      ...                 ...   
5874 2016-08-02 18:00:00         2.7382  36.6386                None   
5875 2016-08-02 19:00:00         2.7539  34.4482                None   
5876 2016-08-02 20:00:00         2.7704  31.5855                None   
5877 2016-08-02 21:00:00         2.7734  29.5872                None   
5878 2016-08-02 22:00:00         2.7793  28.5137                None   
5879 2016-08-02 23:00:00         2.8093  26.5671                None   
5880 2016-08-03 00:00:00         2.8098  23.8924                None   
5881 2016-08-03 01:00:00         2.7877  22.0040                None   
5882 2016-08-03 02:00:00         2.7823  21.2675                None   
5883 2016-08-03 03:00:00         2.7695  21.1249                None   
5884 2016-08-03 04:00:00         2.7639  20.6274                None   
5885 2016-08-03 05:00:00         2.7645  20.4072                None   
5886 2016-08-03 06:00:00         2.7711  21.1734                None   
5887 2016-08-03 07:00:00         2.7866  23.4834                None   
5888 2016-08-03 08:00:00         2.8018  27.0117                None   
5889 2016-08-03 09:00:00         2.8072  30.8763                None   
5890 2016-08-03 10:00:00         2.8077  35.2521                None   
5891 2016-08-03 11:00:00         2.8014  39.4757                None   
5892 2016-08-03 12:00:00         2.7831  42.2798                None   
5893 2016-08-03 13:00:00         2.7540  42.2075                None   
5894 2016-08-03 14:00:00         2.7355  41.0800                None   
5895 2016-08-03 16:00:00         2.7485  28.0282                None   
5896 2016-08-03 17:00:00         2.7403  31.5124                None   
5897 2016-08-03 18:00:00         2.7298  32.6257                None   
5898 2016-08-03 19:00:00         2.7326  30.7336                None   
5899 2016-08-03 20:00:00         2.7629  27.7507                None   
5900 2016-08-03 21:00:00         2.7842  25.0177                None   
5901 2016-08-03 22:00:00         2.7821  23.2884                None   
5902 2016-08-03 23:00:00         2.7799  22.1864                None   
5903 2016-08-04 00:00:00         2.7749  21.5609                None   

     DELTALEVEL MEASUREDDTW DRIFTCORRECTION DTWBELOWCASING  \
0          None        None            None           None   
1          None        None            None           None   
2          None        None            None           None   
3          None        None            None           None   
4          None        None            None           None   
5          None        None            None           None   
6          None        None            None           None   
7          None        None            None           None   
8          None        None            None           None   
9          None        None            None           None   
10         None        None            None           None   
11         None        None            None           None   
12         None        None            None           None   
13         None        None            None           None   
14         None        None            None           None   
15         None        None            None           None   
16         None        None            None           None   
17         None        None            None           None   
18         None        None            None           None   
19         None        None            None           None   
20         None        None            None           None   
21         None        None            None           None   
22         None        None            None           None   
23         None        None            None           None   
24         None        None            None           None   
25         None        None            None           None   
26         None        None            None           None   
27         None        None            None           None   
28         None        None            None           None   
29         None        None            None           None   
...         ...         ...             ...            ...   
5874       None        None            None           None   
5875       None        None            None           None   
5876       None        None            None           None   
5877       None        None            None           None   
5878       None        None            None           None   
5879       None        None            None           None   
5880       None        None            None           None   
5881       None        None            None           None   
5882       None        None            None           None   
5883       None        None            None           None   
5884       None        None            None           None   
5885       None        None            None           None   
5886       None        None            None           None   
5887       None        None            None           None   
5888       None        None            None           None   
5889       None        None            None           None   
5890       None        None            None           None   
5891       None        None            None           None   
5892       None        None            None           None   
5893       None        None            None           None   
5894       None        None            None           None   
5895       None        None            None           None   
5896       None        None            None           None   
5897       None        None            None           None   
5898       None        None            None           None   
5899       None        None            None           None   
5900       None        None            None           None   
5901       None        None            None           None   
5902       None        None            None           None   
5903       None        None            None           None   

     DTWBELOWGROUNDSURFACE WATERELEVATION  TAPE MEASUREDBY LOCATIONID  
0                     None           None  None       None       9003  
1                     None           None  None       None       9003  
2                     None           None  None       None       9003  
3                     None           None  None       None       9003  
4                     None           None  None       None       9003  
5                     None           None  None       None       9003  
6                     None           None  None       None       9003  
7                     None           None  None       None       9003  
8                     None           None  None       None       9003  
9                     None           None  None       None       9003  
10                    None           None  None       None       9003  
11                    None           None  None       None       9003  
12                    None           None  None       None       9003  
13                    None           None  None       None       9003  
14                    None           None  None       None       9003  
15                    None           None  None       None       9003  
16                    None           None  None       None       9003  
17                    None           None  None       None       9003  
18                    None           None  None       None       9003  
19                    None           None  None       None       9003  
20                    None           None  None       None       9003  
21                    None           None  None       None       9003  
22                    None           None  None       None       9003  
23                    None           None  None       None       9003  
24                    None           None  None       None       9003  
25                    None           None  None       None       9003  
26                    None           None  None       None       9003  
27                    None           None  None       None       9003  
28                    None           None  None       None       9003  
29                    None           None  None       None       9003  
...                    ...            ...   ...        ...        ...  
5874                  None           None  None       None       9003  
5875                  None           None  None       None       9003  
5876                  None           None  None       None       9003  
5877                  None           None  None       None       9003  
5878                  None           None  None       None       9003  
5879                  None           None  None       None       9003  
5880                  None           None  None       None       9003  
5881                  None           None  None       None       9003  
5882                  None           None  None       None       9003  
5883                  None           None  None       None       9003  
5884                  None           None  None       None       9003  
5885                  None           None  None       None       9003  
5886                  None           None  None       None       9003  
5887                  None           None  None       None       9003  
5888                  None           None  None       None       9003  
5889                  None           None  None       None       9003  
5890                  None           None  None       None       9003  
5891                  None           None  None       None       9003  
5892                  None           None  None       None       9003  
5893                  None           None  None       None       9003  
5894                  None           None  None       None       9003  
5895                  None           None  None       None       9003  
5896                  None           None  None       None       9003  
5897                  None           None  None       None       9003  
5898                  None           None  None       None       9003  
5899                  None           None  None       None       9003  
5900                  None           None  None       None       9003  
5901                  None           None  None       None       9003  
5902                  None           None  None       None       9003  
5903                  None           None  None       None       9003  

[5904 rows x 13 columns]

In [56]:
def get_location_data(site_number, first_date=None, last_date=None, limit=None,
                      gw_reading_table="UGGP.UGGPADMIN.UGS_GW_reading"):

    if not first_date:
        first_date = datetime.datetime(1900, 1, 1)
    elif type(first_date) == str:
        try:
            datetime.datetime.strptime(first_date, '%m/%d/%Y')
        except:
            first_date = datetime.datetime(1900, 1, 1)
    # Get last reading at the specified location
    if not last_date or last_date > datetime.datetime.now():
        last_date = datetime.datetime.now()
    query_txt = "LOCATIONID = '{:}' and (READINGDATE >= '{:%m/%d/%Y}' and READINGDATE <= '{:%m/%d/%Y}')"
    query = query_txt.format(site_number, first_date, last_date + datetime.timedelta(days=1))
    sql_sn = (limit, 'ORDER BY READINGDATE ASC')

    fieldnames = ll.get_field_names(gw_reading_table)
    print(fieldnames)
    readings = ll.table_to_pandas_dataframe(gw_reading_table, fieldnames, query, sql_sn)
    print(readings)
    readings.set_index('READINGDATE', inplace=True)
    if len(readings) == 0:
        arcpy.AddMessage('No Records for location {:}'.format(site_number))
    return readings

In [50]:
baro_out['9003']


Out[50]:
MEASUREDLEVEL TEMP BAROEFFICIENCYLEVEL DELTALEVEL MEASUREDDTW DRIFTCORRECTION DTWBELOWCASING DTWBELOWGROUNDSURFACE WATERELEVATION TAPE MEASUREDBY LOCATIONID
READINGDATE

In [21]:
shutil.rmtree(dirpath)

In [22]:
ll.get_field_names(gw_reading_table)


Out[22]:
['READINGDATE',
 'MEASUREDLEVEL',
 'TEMP',
 'BAROEFFICIENCYLEVEL',
 'DELTALEVEL',
 'MEASUREDDTW',
 'DRIFTCORRECTION',
 'DTWBELOWCASING',
 'DTWBELOWGROUNDSURFACE',
 'WATERELEVATION',
 'TAPE',
 'MEASUREDBY',
 'LOCATIONID']

In [17]:
os.listdir(dirpath)


---------------------------------------------------------------------------
NameError                                 Traceback (most recent call last)
<ipython-input-17-b1aa8e435e47> in <module>()
----> 1 os.listdir(dirpath)

NameError: name 'dirpath' is not defined

In [ ]:
wellimp.well_files

In [10]:
xles = ll.xle_head_table(folder)
csvs = ll.csv_info_table(folder)
file_info_table = pd.concat([xles, csvs[0]])
file_info_table.columns


---------------------------------------------------------------------------
EmptyDataError                            Traceback (most recent call last)
<ipython-input-10-6ef61f0874ce> in <module>()
      1 xles = ll.xle_head_table(folder)
----> 2 csvs = ll.csv_info_table(folder)
      3 file_info_table = pd.concat([xles, csvs[0]])
      4 file_info_table.columns

C:\Program Files\ArcGIS\Pro\bin\Python\envs\arcgispro-py3\lib\site-packages\loggerloader\transport.py in csv_info_table(folder)
    554             try:
    555                 cfile = {}
--> 556                 csv[basename] = new_csv_imp(os.path.join(folder, file))
    557                 cfile['Battery_level'] = int(round(csv[basename].loc[csv[basename]. \
    558                                                    index[-1], 'Volts'] / csv[basename]. \

C:\Program Files\ArcGIS\Pro\bin\Python\envs\arcgispro-py3\lib\site-packages\loggerloader\transport.py in new_csv_imp(infile)
    155         A Pandas DataFrame containing the transducer data
    156     """
--> 157     f = pd.read_csv(infile, skiprows=1, parse_dates=[[0, 1]])
    158     # f = f.reset_index()
    159     f['DateTime'] = pd.to_datetime(f['Date_ Time'], errors='coerce')

C:\Program Files\ArcGIS\Pro\bin\Python\envs\arcgispro-py3\lib\site-packages\pandas\io\parsers.py in parser_f(filepath_or_buffer, sep, delimiter, header, names, index_col, usecols, squeeze, prefix, mangle_dupe_cols, dtype, engine, converters, true_values, false_values, skipinitialspace, skiprows, nrows, na_values, keep_default_na, na_filter, verbose, skip_blank_lines, parse_dates, infer_datetime_format, keep_date_col, date_parser, dayfirst, iterator, chunksize, compression, thousands, decimal, lineterminator, quotechar, quoting, escapechar, comment, encoding, dialect, tupleize_cols, error_bad_lines, warn_bad_lines, skipfooter, skip_footer, doublequote, delim_whitespace, as_recarray, compact_ints, use_unsigned, low_memory, buffer_lines, memory_map, float_precision)
    644                     skip_blank_lines=skip_blank_lines)
    645 
--> 646         return _read(filepath_or_buffer, kwds)
    647 
    648     parser_f.__name__ = name

C:\Program Files\ArcGIS\Pro\bin\Python\envs\arcgispro-py3\lib\site-packages\pandas\io\parsers.py in _read(filepath_or_buffer, kwds)
    387 
    388     # Create the parser.
--> 389     parser = TextFileReader(filepath_or_buffer, **kwds)
    390 
    391     if (nrows is not None) and (chunksize is not None):

C:\Program Files\ArcGIS\Pro\bin\Python\envs\arcgispro-py3\lib\site-packages\pandas\io\parsers.py in __init__(self, f, engine, **kwds)
    728             self.options['has_index_names'] = kwds['has_index_names']
    729 
--> 730         self._make_engine(self.engine)
    731 
    732     def close(self):

C:\Program Files\ArcGIS\Pro\bin\Python\envs\arcgispro-py3\lib\site-packages\pandas\io\parsers.py in _make_engine(self, engine)
    921     def _make_engine(self, engine='c'):
    922         if engine == 'c':
--> 923             self._engine = CParserWrapper(self.f, **self.options)
    924         else:
    925             if engine == 'python':

C:\Program Files\ArcGIS\Pro\bin\Python\envs\arcgispro-py3\lib\site-packages\pandas\io\parsers.py in __init__(self, src, **kwds)
   1388         kwds['allow_leading_cols'] = self.index_col is not False
   1389 
-> 1390         self._reader = _parser.TextReader(src, **kwds)
   1391 
   1392         # XXX

pandas\parser.pyx in pandas.parser.TextReader.__cinit__ (pandas\parser.c:6171)()

EmptyDataError: No columns to parse from file

In [ ]:


In [43]:
wellid = '43'
station_table =  "UGGP.UGGPADMIN.UGS_NGWMN_Monitoring_Locations"
#arcpy.env.workspace = self.sde_conn
loc_table = "UGGP.UGGPADMIN.UGS_NGWMN_Monitoring_Locations"

field_names = ['LocationID', 'LocationName', 'LocationType', 'LocationDesc', 'AltLocationID', 'Altitude', 
               'AltitudeUnits', 'WellDepth', 'SiteID', 'Offset', 'LoggerType', 'BaroEfficiency', 
               'BaroEfficiencyStart', 'BaroLoggerType']
df = pd.DataFrame(columns=field_names)

# use a search cursor to iterate rows
search_cursor = arcpy.da.SearchCursor(loc_table, field_names)

    # iterate the rows
for row in search_cursor:
        # combine the field names and row items together, and append them
    df = df.append(dict(zip(field_names, row)), ignore_index=True)
    
iddict = dict(zip(df['LocationName'].values,df['AltLocationID'].values))

well_table = df.set_index(['AltLocationID'])
baroid = well_table.loc[wellid,'BaroLoggerType']
stickup = well_table.loc[wellid,'Offset']
well_elev = well_table.loc[wellid,'Altitude']
be = well_table.loc[wellid,'BaroEfficiency']

In [40]:



Out[40]:
LocationID LocationName LocationType LocationDesc AltLocationID Altitude AltitudeUnits WellDepth SiteID Offset LoggerType BaroEfficiency BaroEfficiencyStart BaroLoggerType
0 393453113515501 P1001 Well Miller Spring Piezometer 3 1001 4760.66 None 3.6 40 None None 0.00 None 9061
1 393452113515501 P1002 Well Miller Spring Piezometer 2 1002 4761.89 None 4 40 None None 0.00 None 9061
2 393454113515501 P1003 Well Miller Spring Piezometer 4 1003 4760.67 None 3.8 40 None None 0.00 None 9061
3 393450113515501 P1004 Well Miller Spring Piezometer 1 1004 4761.41 None 3.8 40 None None 0.00 None 9061
4 393501113515601 P1005 Well Miller Spring Piezometer 5 1005 4760.68 None 4 40 None None 0.00 None 9061
5 393500113515601 P1006 Well Miller Spring Piezometer 6 1006 4759.73 None 3.8 40 None None 0.00 None 9061
6 393501113515701 P1007 Well Miller Spring Piezometer 7 1007 4759.88 None 3.8 40 None None 0.00 None 9061
7 393506113515701 P1008 Well Miller Spring Piezometer 9 1008 4759.11 None 3.9 40 None None 0.00 None 9061
8 393506113515601 P1009 Well Miller Spring Piezometer 10 1009 4759.00 None 4 40 None None 0.00 None 9061
9 393506113515801 P1010 Well Miller Spring Piezometer 8 1010 4759.58 None 3.9 40 None None 0.00 None 9061
10 393329113532901 P1011 Well Leland-Harris Piezometer 2 1011 4781.28 None 4.1 59 None None 0.00 None 9061
11 393330113532901 P1012 Well Leland-Harris Piezometer 1 1012 4781.63 None 3.8 59 None None 0.00 None 9061
12 393326113533101 P1013 Well Leland-Harris Piezometer 4 1013 4781.31 None 4.1 59 None None 0.00 None 9061
13 393328113533101 P1014 Well Leland-Harris Piezometer 3 1014 4779.06 None 3.7 59 None None 0.00 None 9061
14 393328113531701 P1015 Well Leland-Harris Piezometer 5 1015 4780.00 None 4.1 60 None None 0.00 None 9061
15 393328113531601 P1016 Well Leland-Harris Piezometer 6 1016 4777.50 None 2.7 60 None None 0.00 None 9061
16 393331113531801 P1017 Well Leland-Harris Piezometer 7 1017 4778.59 None 3.9 60 None None 0.00 None 9061
17 393333113531801 P1018 Well Leland-Harris Piezometer 8 1018 4778.60 None 4.2 60 None None 0.00 None 9061
18 393335113531901 P1019 Well Leland-Harris Piezometer 9 1019 4778.36 None 3.9 60 None None 0.00 None 9061
19 393337113531901 P1020 Well Leland-Harris Piezometer 10 1020 4780.08 None 4.2 60 None None 0.00 None 9061
20 392451113523201 P1021 Well Foote Reservoir Piezometer 1 1021 4814.01 None 3.8 -1 None None 0.00 None 9061
21 392450113523201 P1022 Well Foote Reservoir Piezometer 2 1022 4812.49 None 3.9 -1 None None 0.00 None 9061
22 392448113523401 P1023 Well Foote Reservoir Piezometer 3 1023 4812.54 None 3.7 -1 None None 0.00 None 9061
23 392449113524601 P1024 Well Foote Reservoir Piezometer 4 1024 4808.56 None 3.9 -1 None None 0.00 None 9061
24 392449113524801 P1025 Well Foote Reservoir Piezometer 5 1025 4809.78 None 4 -1 None None 0.00 None 9061
25 392449113524901 P1026 Well Foote Reservoir Piezometer 6 1026 4807.63 None 3.9 -1 None None 0.00 None 9061
26 392907113545801 P1027 Well Gandy Salt Marsh Piezometer 1 1027 4794.18 None 4.1 53 None None 0.00 None 9061
27 392907113545901 P1028 Well Gandy Salt Marsh Piezometer 2 1028 4792.67 None 4.3 53 None None 0.00 None 9061
28 392906113545801 P1029 Well Gandy Salt Marsh Piezometer 3 1029 4793.25 None 4.2 53 None None 0.00 None 9061
29 392904113545501 P1030 Well Gandy Salt Marsh Piezometer 4 1030 4791.79 None 3.7 53 None None 0.00 None 9061
... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
336 405512111193801 WRSMB Well None 131 5835.00 None None 49 -0.14 None 0.00 None None
337 405511111193901 WLSMB Well None 132 5827.00 None None 49 -0.38 None 0.00 None None
338 405511111193701 W-5 Well None 133 5852.00 None None 49 -0.13 None 0.00 None None
339 404745111570001 AE-HW1 Well None 500 4211.00 None None 102 None None 0.00 None None
340 404744111570001 AE-HW2 Well None 501 4211.00 None None 102 None None 0.00 None None
341 404744111570101 AE-FW1 Well None 502 4213.00 None None 102 None None 0.00 None None
342 384702114041601 Snake Valley South MX Well None 71 5581.90 None None 30 2.15 None 0.00 None 9003
343 390812114033901 Snake Valley North MX Well None 70 5096.10 None None 29 2.13 Global Water until 6/8/10, then Solinst 0.00 None 9003
344 404938110055001 None Well None 503 10469.00 None None 103 None None 0.00 None None
345 404938110054901 None Well None 504 10465.00 None None 103 None None 0.00 None None
346 404938110055002 None Well None 505 10469.00 None None 103 None None 0.00 None None
347 404938110054902 None Well None 506 10465.00 None None 103 None None 0.00 None None
348 404938110054903 None Well None 507 10465.00 None None 103 None None 0.00 None None
349 404943110055003 None Well None 508 10334.00 None None 103 None None 0.00 None None
350 384620114024901 Big Springs Creek above Dearden Ranch Spring at Burbank UT 1 5438.00 None None 36 None None 0.00 None None
351 384636114024101 Highline Ditch Spring at Burbank UT 2 5426.00 None None 36 None None 0.00 None None
352 384638114023401 East Middle Ditch Spring at Burbank UT 3 5418.00 None None 36 None None 0.00 None None
353 384641114023402 West Middle Ditch Spring at Burbank UT 4 5420.00 None None 36 None None 0.00 None None
354 384631114024501 Dearden Springs Spring at Burbank UT 5 5424.00 None None 36 None None 0.20 None None
355 385156113593701 Clay Spring Spring 5.5 miles SSE of Garrison UT 6 5440.00 None None 37 None None 0.00 None None
356 391814114003001 Kell (Beck) Spring Spring 10 miles south of Gandy UT 7 4939.00 None None 39 None None 0.20 None 9003
357 392411113514601 Twin Springs south Spring 7.4 miles ESE of Gandy UT 8 4819.00 None None 73 None None 0.00 None 9061
358 392413113515001 Twin Springs north Spring 7.4 miles ESE of Gandy UT 9 4819.00 None None 73 None None 0.33 None 9061
359 392456113521301 Foote Reservoir flume Spring 6.7 miles ESE of Gandy UT 10 4827.00 None None 38 None None 0.33 None None
360 392548113531701 Pivot north of Foote Reservoir Spring 5.5 miles ESE of Gandy UT 11 4824.00 None None 38 None None 0.10 None None
361 393448113515401 Miller Spring Spring 4.5 miles SE of Partoun UT 12 4771.00 None None 40 None None 0.25 None None
362 392459113521302 Foote Reservoir Spring (combined flow) Spring 6.7 miles ESE of Gandy UT 13 4833.00 None None 38 None None 0.30 None None
363 391926113391803 PW10 Baro None Barometer at PW10 9027 5266.30 None None -1 None Solinst 0.00 None None
364 393803113161602 PW19 Baro None PW19 Barometer 9049 4633.00 None None -1 None None 0.00 None None
365 393331113533004 SG25 Barometer None SG25 Barometer 9061 4784.00 None None -1 None None 0.00 None None

366 rows × 14 columns


In [31]:
['LocationID', 'LocationName', 'LocationType', 'LocationDesc', 'AltLocationID', 'Altitude', 'AltitudeUnits',
 'WellDepth', 'SiteID', 'Offset', 'LoggerType', 'BaroEfficiency', 'BaroEfficiencyStart', 'BaroLoggerType']


---------------------------------------------------------------------------
TypeError                                 Traceback (most recent call last)
<ipython-input-31-28d4e564afad> in <module>()
----> 1 sorted(loc_names)

TypeError: unorderable types: NoneType() < str()

In [14]:
fc = "UGGP.UGGPADMIN.UGS_NGWMN_Monitoring_Locations"
field = "LocationName"
cursor = arcpy.SearchCursor(fc)
ll.table_to_pandas_dataframe(fc, field_names=['LocationName''AltLocationID'])
for row in cursor:
    print(row.getValue(field))


Out[14]:
{'AE-FW1': '502',
 'AE-HW1': '500',
 'AE-HW2': '501',
 'AG13A': '33',
 'AG13B': '34',
 'AG13C': '35',
 'AG14A': '36',
 'AG14B': '37',
 'AG14C': '38',
 'AG15': '39',
 'AG16A': '40',
 'AG16B': '41',
 'AG16C': '42',
 'B-1': '81',
 'B-1 (15)': '100',
 'B-105': '85',
 'B-107': '86',
 'B-109': '87',
 'B-15B': '93',
 'B-16': '89',
 'B-2': '82',
 'B-2 (20)': '101',
 'B-2 (30)': '102',
 'B-3': '103',
 'B-4': '104',
 'B-5': '105',
 'B-6': '106',
 'B-7': '107',
 'B-8 (30)': '109',
 'B-8 (50)': '110',
 'B-9': '111',
 'BARO1 at Leland-Harris': '9025',
 'BH121': '126',
 'BH123': '125',
 'BH125': '127',
 'BL-P1': '134',
 'BL-P2': '135',
 'Big Springs Creek above Dearden Ranch': '1',
 'CHWENT': '138',
 'CHWNAV': '139',
 'Callao C119': '136',
 'Central Tule MX': '75',
 'Clay Spring': '6',
 'Coyote Knolls MX': '46',
 'Dearden Springs': '5',
 'East Middle Ditch': '3',
 'Eskdale MX': '73',
 'Foote Reservoir Spring (combined flow)': '13',
 'Foote Reservoir flume': '10',
 'GP1': '123',
 'GP2': '124',
 'Highline Ditch': '2',
 'Kell (Beck) Spring': '7',
 'MW-2': '112',
 'Miller Spring': '12',
 'Needle Point 23a': '72',
 'P-1': '76',
 'P-3': '137',
 'P-4': '78',
 'P-5': '79',
 'P1001': '1001',
 'P1002': '1002',
 'P1003': '1003',
 'P1004': '1004',
 'P1005': '1005',
 'P1006': '1006',
 'P1007': '1007',
 'P1008': '1008',
 'P1009': '1009',
 'P1010': '1010',
 'P1011': '1011',
 'P1012': '1012',
 'P1013': '1013',
 'P1014': '1014',
 'P1015': '1015',
 'P1016': '1016',
 'P1017': '1017',
 'P1018': '1018',
 'P1019': '1019',
 'P1020': '1020',
 'P1021': '1021',
 'P1022': '1022',
 'P1023': '1023',
 'P1024': '1024',
 'P1025': '1025',
 'P1026': '1026',
 'P1027': '1027',
 'P1028': '1028',
 'P1029': '1029',
 'P1030': '1030',
 'P1031': '1031',
 'P1032': '1032',
 'P1033': '1033',
 'P1034': '1034',
 'P1035': '1035',
 'P1036': '1036',
 'P1037': '1037',
 'P1038': '1038',
 'P1039': '1039',
 'P1040': '1040',
 'P1041': '1041',
 'P1042': '1042',
 'P1043': '1043',
 'P1044': '1044',
 'P1045': '1045',
 'P1046': '1046',
 'P1047': '1047',
 'P1048': '1048',
 'P1049': '1049',
 'P1050': '1050',
 'P1051': '1051',
 'P1052': '1052',
 'P1053': '1053',
 'P1054': '1054',
 'P1055': '1055',
 'P1056': '1056',
 'P1057': '1057',
 'P1058': '1058',
 'P1059': '1059',
 'P1060': '1060',
 'P1061': '1061',
 'P1062': '1062',
 'P1063': '1063',
 'P1064': '1064',
 'P1065': '1065',
 'P1066': '1066',
 'P1067': '1067',
 'P1068': '1068',
 'P1069': '1069',
 'P1070': '1070',
 'P1071': '1071',
 'P1072': '1072',
 'P1073': '1073',
 'P1075': '1075',
 'P1076': '1076',
 'P1077': '1077',
 'P1078': '1078',
 'P1079': '1079',
 'P1080': '1080',
 'P1081': '1081',
 'P1090': '1090',
 'P1091': '1091',
 'P1092': '1092',
 'P1093': '1093',
 'P1094': '1094',
 'P1095': '1095',
 'P1096': '1096',
 'P1097': '1097',
 'P1098': '1098',
 'P2001': '2001',
 'P2002': '2002',
 'P2003': '2003',
 'P3001': '3001',
 'P3002': '3002',
 'P3003': '3003',
 'PW01A': '1',
 'PW01B': '2',
 'PW01C': '3',
 'PW02A': '4',
 'PW02B': '5',
 'PW03 Baro': '9003',
 'PW03A': '6',
 'PW03B': '7',
 'PW03Z': '8',
 'PW04A': '9',
 'PW04B': '10',
 'PW05A': '11',
 'PW05B': '12',
 'PW05C': '13',
 'PW06A': '14',
 'PW06B': '15',
 'PW06C': '16',
 'PW06D': '17',
 'PW06MX': '18',
 'PW07A': '19',
 'PW07B': '20',
 'PW07MX': '21',
 'PW08A': '22',
 'PW08B': '23',
 'PW09A': '24',
 'PW09B': '25',
 'PW10 Baro': '9027',
 'PW10A': '26',
 'PW10B': '27',
 'PW11B': '28',
 'PW11C': '29',
 'PW11D': '30',
 'PW11E': '31',
 'PW12': '32',
 'PW17A': '43',
 'PW17B': '44',
 'PW17C': '45',
 'PW18': '47',
 'PW19 Baro': '9049',
 'PW19A': '48',
 'PW19B': '49',
 'PW19C': '50',
 'PW20': '51',
 'Pivot north of Foote Reservoir': '11',
 'SG21A': '52',
 'SG21B': '53',
 'SG21C': '54',
 'SG22A': '55',
 'SG22B': '56',
 'SG23B': '57',
 'SG24A': '58',
 'SG24B': '59',
 'SG24C': '60',
 'SG25 Barometer': '9061',
 'SG25A': '61',
 'SG25B': '62',
 'SG25C': '63',
 'SG25D': '64',
 'SG26A': '65',
 'SG26B': '66',
 'SG26C': '67',
 'SG27': '68',
 'SH-1': '108',
 'Shell-Baker': '74',
 'Snake Valley North MX': '70',
 'Snake Valley South MX': '71',
 'TP-4': '88',
 'Twin Springs Baro': '9024',
 'Twin Springs MX': '69',
 'Twin Springs north': '9',
 'Twin Springs south': '8',
 'W-1': '94',
 'W-2': '130',
 'W-3': '129',
 'W-4': '96',
 'W-5': '133',
 'W-C1': '97',
 'W-C2': '98',
 'WLSMB': '132',
 'WRSMB': '131',
 'West Middle Ditch': '4'}

In [15]:
os.environ.get('USERNAME')


Out[15]:
'paulinkenbrandt'

In [16]:
os.path.splitext(well_file)[1] == '.xle'


Out[16]:
True

In [14]:
well_file = folder+'pw03z 20170309.xle'
baro_file = folder+'pw03baro 20170309.xle'
#ll.imp_one_well(well_file,baro_file,)

In [ ]:
well_table = ll.match_files_to_wellid(folder)
#query barometers to a single table

well_table.to_pickle(folder+'well_table.pickle')

In [ ]:
bpunits = []
baroid = well_table.loc[wellid,'BaroLoggerType']
stickup = well_table.loc[wellid,'Offset']
well_elev = well_table.loc[wellid,'Altitude']
be = well_table.loc[wellid,'BaroEfficiency']

Run Transducer Processing


In [13]:
well_table = ll.match_files_to_wellid(folder)
#query barometers to a single table

well_table.to_pickle(folder+'well_table.pickle')


---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
<ipython-input-13-c3f14f0caf49> in <module>()
----> 1 well_table = ll.match_files_to_wellid(folder)
      2 #query barometers to a single table
      3 
      4 well_table.to_pickle(folder+'well_table.pickle')

C:\Program Files\ArcGIS\Pro\bin\Python\envs\arcgispro-py3\lib\site-packages\loggerloader\transport.py in match_files_to_wellid(folder, station_table)
   1217             return try_match
   1218 
-> 1219     well_table['WellID'] = well_table[['Location', 'fileroot']].apply(lambda x: tryfile(x), 1)
   1220     well_table['baronum'] = well_table['WellID'].apply(lambda x: bdict.get(x), 1)
   1221     well_table['Altitude'] = well_table['WellID'].apply(lambda x: elevdict.get(x), 1)

C:\Program Files\ArcGIS\Pro\bin\Python\envs\arcgispro-py3\lib\site-packages\pandas\core\frame.py in apply(self, func, axis, broadcast, raw, reduce, args, **kwds)
   4358                         f, axis,
   4359                         reduce=reduce,
-> 4360                         ignore_failures=ignore_failures)
   4361             else:
   4362                 return self._apply_broadcast(f, axis)

C:\Program Files\ArcGIS\Pro\bin\Python\envs\arcgispro-py3\lib\site-packages\pandas\core\frame.py in _apply_standard(self, func, axis, ignore_failures, reduce)
   4454             try:
   4455                 for i, v in enumerate(series_gen):
-> 4456                     results[i] = func(v)
   4457                     keys.append(v.name)
   4458             except Exception as e:

C:\Program Files\ArcGIS\Pro\bin\Python\envs\arcgispro-py3\lib\site-packages\loggerloader\transport.py in <lambda>(x)
   1217             return try_match
   1218 
-> 1219     well_table['WellID'] = well_table[['Location', 'fileroot']].apply(lambda x: tryfile(x), 1)
   1220     well_table['baronum'] = well_table['WellID'].apply(lambda x: bdict.get(x), 1)
   1221     well_table['Altitude'] = well_table['WellID'].apply(lambda x: elevdict.get(x), 1)

C:\Program Files\ArcGIS\Pro\bin\Python\envs\arcgispro-py3\lib\site-packages\loggerloader\transport.py in tryfile(x)
   1210         nameparts = str(x[1]).split(' ')
   1211         try_match = iddict.get(loc_name_strip)
-> 1212         if try_match is None or int(str(try_match)) > 140:
   1213             file_name_strip = str(' '.join(nameparts[:-1])).lower().replace(" ", "").replace("-", "")
   1214             wl_value = iddict.get(file_name_strip)

ValueError: ("invalid literal for int() with base 10: '33.0'", 'occurred at index ag13a 20160802')

In [ ]:
maxtime = max(pd.to_datetime(well_table['Stop_time']))
mintime = min(pd.to_datetime(well_table['Start_time']))
print('Pulling Barometric Pressure data from {:} to {:}'.format(mintime, maxtime))

In [ ]:
bpunits = []
for ind in well_table.index:
    if 'baro' in str(ind) or 'baro' in str(well_table.loc[ind,'Location']):
        bpunits.append(well_table.loc[ind,'WellID'])

baro_out = {}
for baroid in bpunits:
    baro_out[baroid] = ll.get_location_data(gw_reading_table, baroid, mintime, 
                                   maxtime + datetime.timedelta(days=1))
    baro_out[baroid].to_pickle(folder+str(baroid)+'.pickle')

Bring in existing data


In [9]:
well_table =pd.read_pickle(folder+'well_table.pickle')

bpunits = []
for ind in well_table.index:
    if 'baro' in str(ind) or 'baro' in str(well_table.loc[ind,'Location']):
        bpunits.append(well_table.loc[ind,'WellID'])

baro_out = {}
for baroid in bpunits:
    baro_out[baroid] = pd.read_pickle(folder + str(baroid) + '.pickle')

In [10]:
manualwls = raw_archive_folder + '/All tape measurements.csv'
manual = pd.read_csv(manualwls, index_col="DateTime", engine="python")

In [ ]:
man_startdate = '1/1/2001' 
man_endate = '10/11/2002'
man_start_level = 10 
man_end_level =15.1

arcpy.AddMessage('Well {:} successfully imported!'.format(ind))

In [14]:
well = ll.new_trans_imp(r'M:\PROJECTS\Snake Valley Water\Transducer Data\Raw_data_archive\2017subset\ag13a 20170308.xle')

In [17]:
manualfile


Out[17]:
DateTime Location ID Water Level (ft) WellName Current Stickup Height
1970-01-01 00:00:00.000000000 8/16/2006 10:02 71.0 71.55 Snake Valley South MX NaN
1970-01-01 00:00:00.000000006 10/13/2006 10:02 71.0 71.57 Snake Valley South MX NaN
1970-01-01 00:00:00.000000010 12/15/2006 11:32 71.0 71.45 Snake Valley South MX NaN
1970-01-01 00:00:00.000000015 1/18/2007 12:39 71.0 71.41 Snake Valley South MX NaN
1970-01-01 00:00:00.000000021 2/27/2007 13:47 71.0 71.37 Snake Valley South MX NaN
1970-01-01 00:00:00.000000028 5/14/2007 13:42 71.0 71.33 Snake Valley South MX NaN
1970-01-01 00:00:00.000000036 7/18/2007 12:56 71.0 71.59 Snake Valley South MX NaN
1970-01-01 00:00:00.000000112 11/14/2007 7:53 71.0 71.62 Snake Valley South MX NaN
1970-01-01 00:00:00.000000140 2/28/2008 12:01 71.0 71.45 Snake Valley South MX NaN
1970-01-01 00:00:00.000000147 4/29/2008 10:12 71.0 71.35 Snake Valley South MX NaN
1970-01-01 00:00:00.000000188 7/28/2008 14:27 71.0 71.64 Snake Valley South MX NaN
1970-01-01 00:00:00.000000218 10/27/2008 16:28 71.0 71.72 Snake Valley South MX NaN
1970-01-01 00:00:00.000000292 2/10/2009 10:34 71.0 71.61 Snake Valley South MX NaN
1970-01-01 00:00:00.000000372 6/2/2009 9:35 71.0 71.44 Snake Valley South MX NaN
1970-01-01 00:00:00.000000425 9/14/2009 18:07 71.0 71.75 Snake Valley South MX NaN
1970-01-01 00:00:00.000000498 12/1/2009 16:41 71.0 71.63 Snake Valley South MX NaN
1970-01-01 00:00:00.000000571 3/2/2010 17:37 71.0 71.47 Snake Valley South MX NaN
1970-01-01 00:00:00.000000644 6/7/2010 18:29 71.0 71.36 Snake Valley South MX NaN
1970-01-01 00:00:00.000000720 9/13/2010 19:11 71.0 71.76 Snake Valley South MX NaN
1970-01-01 00:00:00.000000797 12/6/2010 17:28 71.0 71.68 Snake Valley South MX NaN
1970-01-01 00:00:00.000000899 3/1/2011 14:10 71.0 71.50 Snake Valley South MX NaN
1970-01-01 00:00:00.000000952 6/6/2011 17:13 71.0 71.39 Snake Valley South MX NaN
1970-01-01 00:00:00.000001029 9/12/2011 15:58 71.0 71.66 Snake Valley South MX NaN
1970-01-01 00:00:00.000001110 12/6/2011 10:02 71.0 71.61 Snake Valley South MX NaN
1970-01-01 00:00:00.000001177 2/27/2012 16:46 71.0 71.45 Snake Valley South MX NaN
1970-01-01 00:00:00.000001254 6/4/2012 14:29 71.0 71.42 Snake Valley South MX NaN
1970-01-01 00:00:00.000001334 9/10/2012 16:18 71.0 71.78 Snake Valley South MX NaN
1970-01-01 00:00:00.000001414 12/11/2012 10:02 71.0 71.62 Snake Valley South MX NaN
1970-01-01 00:00:00.000001485 3/4/2013 16:46 71.0 71.52 Snake Valley South MX NaN
1970-01-01 00:00:00.000001568 6/17/2013 16:33 71.0 71.50 Snake Valley South MX NaN
1970-01-01 00:00:00.000001644 9/16/2013 16:23 71.0 71.77 Snake Valley South MX NaN
1970-01-01 00:00:00.000001720 12/9/2013 17:05 71.0 71.68 Snake Valley South MX NaN
1970-01-01 00:00:00.000001790 3/3/2014 16:32 71.0 71.52 Snake Valley South MX NaN
1970-01-01 00:00:00.000001869 6/2/2014 16:35 71.0 71.45 Snake Valley South MX NaN
1970-01-01 00:00:00.000001939 9/8/2014 15:47 71.0 71.77 Snake Valley South MX NaN
1970-01-01 00:00:00.000002015 12/8/2014 16:31 71.0 71.61 Snake Valley South MX NaN
1970-01-01 00:00:00.000002092 3/2/2015 16:31 71.0 71.46 Snake Valley South MX NaN
1970-01-01 00:00:00.000002166 6/23/2015 17:12 71.0 71.47 Snake Valley South MX NaN
1970-01-01 00:00:00.000002276 12/1/2015 16:15 71.0 71.61 Snake Valley South MX NaN
1970-01-01 00:00:00.000002317 5/2/2016 15:55 71.0 71.42 Snake Valley South MX NaN
1970-01-01 00:00:00.000002450 8/1/2016 15:05 71.0 71.67 Snake Valley South MX NaN
1970-01-01 00:00:00.000002529 3/8/2017 9:58 71.0 71.51 Snake Valley South MX NaN

In [15]:
manl = pd.read_csv(u'M:\PROJECTS\Snake Valley Water\Transducer Data\Raw_data_archive\All tape measurements.csv')
manualfile = manl[manl['Location ID'] == int('71')]
manualfile.index = pd.to_datetime(manualfile.index)
manualfile.sort_index(inplace=True)
for i in range(len(manualfile)):
    print(wa.fcl(well, manualfile.index[i]).name)


2016-08-02 10:00:00
2016-08-02 10:00:00
2016-08-02 10:00:00
2016-08-02 10:00:00
2016-08-02 10:00:00
2016-08-02 10:00:00
2016-08-02 10:00:00
2016-08-02 10:00:00
2016-08-02 10:00:00
2016-08-02 10:00:00
2016-08-02 10:00:00
2016-08-02 10:00:00
2016-08-02 10:00:00
2016-08-02 10:00:00
2016-08-02 10:00:00
2016-08-02 10:00:00
2016-08-02 10:00:00
2016-08-02 10:00:00
2016-08-02 10:00:00
2016-08-02 10:00:00
2016-08-02 10:00:00
2016-08-02 10:00:00
2016-08-02 10:00:00
2016-08-02 10:00:00
2016-08-02 10:00:00
2016-08-02 10:00:00
2016-08-02 10:00:00
2016-08-02 10:00:00
2016-08-02 10:00:00
2016-08-02 10:00:00
2016-08-02 10:00:00
2016-08-02 10:00:00
2016-08-02 10:00:00
2016-08-02 10:00:00
2016-08-02 10:00:00
2016-08-02 10:00:00
2016-08-02 10:00:00
2016-08-02 10:00:00
2016-08-02 10:00:00
2016-08-02 10:00:00
2016-08-02 10:00:00
2016-08-02 10:00:00

Upload Well Data To SDE


In [11]:
barocolumn='MEASUREDLEVEL'
dft_ln = {}
dft_st = {}

from matplotlib.backends.backend_pdf import PdfPages
pdf_pages = PdfPages(folder + '/wells.pdf')

welltest = well_table.index.values
for ind in welltest:
    # import well file
    df, man, be, drift = ll.imp_well(well_table,ind,manual,baro_out)
    
    # plot data
    y1 = df['WATERELEVATION'].values
    y2 = df['barometer'].values
    x1 = df.index.values
    x2 = df.index.values

    x4 = man.index
    y4 = man['Meas_GW_Elev']
    fig, ax1 = plt.subplots()
    ax1.scatter(x4,y4,color='purple')
    ax1.plot(x1,y1,color='blue',label='Water Level Elevation')
    ax1.set_ylabel('Water Level Elevation',color='blue')
    ax1.set_ylim(min(df['WATERELEVATION']),max(df['WATERELEVATION']))
    y_formatter = tick.ScalarFormatter(useOffset=False)
    ax1.yaxis.set_major_formatter(y_formatter)
    ax2 = ax1.twinx()
    ax2.set_ylabel('Barometric Pressure (ft)', color='red') 
    ax2.plot(x2,y2,color='red',label='Barometric pressure (ft)')
    h1, l1 = ax1.get_legend_handles_labels()
    h2, l2 = ax2.get_legend_handles_labels()
    ax1.legend(h1+h2, l1+l2, loc=3)
    plt.xlim(df.first_valid_index()-datetime.timedelta(days=3),df.last_valid_index()+datetime.timedelta(days=3))
    plt.title('Well: {:}  Drift: {:}  Baro. Eff.: {:}'.format(ind,drift,be))

    pdf_pages.savefig(fig)
    plt.close()



    
pdf_pages.close()
print("DONE!")
print("Files in "+ folder+'\\wells.pdf')


C:\Program Files\ArcGIS\Pro\bin\Python\envs\arcgispro-py3\lib\site-packages\pandas\core\indexing.py:337: SettingWithCopyWarning: 
A value is trying to be set on a copy of a slice from a DataFrame.
Try using .loc[row_indexer,col_indexer] = value instead

See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy
  self.obj[key] = _infer_fill_value(value)
C:\Program Files\ArcGIS\Pro\bin\Python\envs\arcgispro-py3\lib\site-packages\pandas\core\indexing.py:517: SettingWithCopyWarning: 
A value is trying to be set on a copy of a slice from a DataFrame.
Try using .loc[row_indexer,col_indexer] = value instead

See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy
  self.obj[item] = s
C:\Program Files\ArcGIS\Pro\bin\Python\envs\arcgispro-py3\lib\site-packages\loggerloader\transport.py:158: SettingWithCopyWarning: 
A value is trying to be set on a copy of a slice from a DataFrame.
Try using .loc[row_indexer,col_indexer] = value instead

See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy
  df['julian'] = df.index.to_julian_date()
C:\Program Files\ArcGIS\Pro\bin\Python\envs\arcgispro-py3\lib\site-packages\loggerloader\transport.py:174: SettingWithCopyWarning: 
A value is trying to be set on a copy of a slice from a DataFrame.
Try using .loc[row_indexer,col_indexer] = value instead

See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy
  df['datechange'] = df['julian'].apply(lambda x: x - df.loc[df.index[0], 'julian'], 1)
C:\Program Files\ArcGIS\Pro\bin\Python\envs\arcgispro-py3\lib\site-packages\loggerloader\transport.py:178: SettingWithCopyWarning: 
A value is trying to be set on a copy of a slice from a DataFrame.
Try using .loc[row_indexer,col_indexer] = value instead

See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy
  df['DRIFTCORRECTION'] = df['datechange'].apply(lambda x: m * x , 1)
C:\Program Files\ArcGIS\Pro\bin\Python\envs\arcgispro-py3\lib\site-packages\loggerloader\transport.py:179: SettingWithCopyWarning: 
A value is trying to be set on a copy of a slice from a DataFrame.
Try using .loc[row_indexer,col_indexer] = value instead

See the caveats in the documentation: http://pandas.pydata.org/pandas-docs/stable/indexing.html#indexing-view-versus-copy
  df[outcolname] = df[meas] - (df['DRIFTCORRECTION'] + b)
Last reading in database for well ag13a 20160802 was on 2017-03-08 10:00:00.
        The first reading from the raw file is on 2016-05-03 11:00:00. Drift = -0.009.
Dates later than import data for this station already exist!
Last reading in database for well ag13a 20170308 was on 2017-03-08 10:00:00.
        The first reading from the raw file is on 2016-08-02 11:00:00. Drift = 0.032.
Dates later than import data for this station already exist!
Last reading in database for well ag13b 20160802 was on 2016-08-02 09:00:00.
        The first reading from the raw file is on 2016-05-03 11:00:00. Drift = -0.041.
Dates later than import data for this station already exist!
Last reading in database for well ag13c 20160802 was on 2016-08-02 08:00:00.
        The first reading from the raw file is on 2016-05-03 11:00:00. Drift = -1.075.
Dates later than import data for this station already exist!
Last reading in database for well ag14b 20170309 was on 2017-03-09 07:00:00.
        The first reading from the raw file is on 2016-08-02 12:00:00. Drift = 0.106.
Dates later than import data for this station already exist!
Last reading in database for well ag14c 20160802 was on 2017-03-09 07:00:00.
        The first reading from the raw file is on 2016-05-03 13:00:00. Drift = -0.06.
Dates later than import data for this station already exist!
Last reading in database for well ag16a 20160802 was on 2017-03-08 09:00:00.
        The first reading from the raw file is on 2016-05-03 11:00:00. Drift = -0.029.
Dates later than import data for this station already exist!
Last reading in database for well ag16a 20170308 was on 2017-03-08 09:00:00.
        The first reading from the raw file is on 2016-08-02 10:00:00. Drift = 0.03.
Dates later than import data for this station already exist!
Last reading in database for well ag16b 20160802 was on 2016-08-02 07:00:00.
        The first reading from the raw file is on 2016-05-03 10:00:00. Drift = 0.02.
Dates later than import data for this station already exist!
Last reading in database for well callao 20160804 was on 2016-08-04 06:00:00.
        The first reading from the raw file is on 2016-05-05 10:00:00. Drift = -0.03.
Dates later than import data for this station already exist!
Last reading in database for well central tule mx 20160803 was on 2017-03-09 12:00:00.
        The first reading from the raw file is on 2016-05-04 15:00:00. Drift = -0.053.
Dates later than import data for this station already exist!
C:\Program Files\ArcGIS\Pro\bin\Python\envs\arcgispro-py3\lib\site-packages\loggerloader\transport.py:171: RuntimeWarning: divide by zero encountered in double_scalars
  m = drift / (last_man['julian'] - first_man['julian'])
Last reading in database for well ckmx 20170307 was on 2016-08-03 10:00:00.
        The first reading from the raw file is on 2016-08-03 14:00:00. Drift = 0.041.
Well ckmx 20170307 successfully imported!
Last reading in database for well coyote knolls mx 20160803 was on 2016-08-03 10:00:00.
        The first reading from the raw file is on 2016-05-04 14:00:00. Drift = -0.077.
Dates later than import data for this station already exist!
Last reading in database for well ctvmx 20170309 was on 2017-03-09 12:00:00.
        The first reading from the raw file is on 2016-08-03 15:00:00. Drift = 0.065.
Dates later than import data for this station already exist!
Units in psi, converting to ft...
Dropped from beginning to 2016-09-14 09:00:00
Last reading in database for well eskdale mx 20170309 was on 2017-03-09 08:00:00.
        The first reading from the raw file is on 2016-09-14 11:00:00. Drift = 0.274.
Dates later than import data for this station already exist!
Last reading in database for well needle point blm 20160801 was on 2016-08-01 13:00:00.
        The first reading from the raw file is on 2016-05-02 16:00:00. Drift = -0.031.
Dates later than import data for this station already exist!
Last reading in database for well pw01a 20160802 was on 2017-03-08 11:00:00.
        The first reading from the raw file is on 2016-05-03 12:00:00. Drift = -0.044.
Dates later than import data for this station already exist!
Dropped from beginning to 2016-08-02 10:00:00
Last reading in database for well pw01a 20170308 was on 2017-03-08 11:00:00.
        The first reading from the raw file is on 2016-08-02 12:00:00. Drift = -0.007.
Dates later than import data for this station already exist!
Last reading in database for well pw01b 20160802 was on 2017-03-08 11:00:00.
        The first reading from the raw file is on 2016-05-03 12:00:00. Drift = -0.043.
Dates later than import data for this station already exist!
Last reading in database for well pw01b 20170308 was on 2017-03-08 11:00:00.
        The first reading from the raw file is on 2016-08-02 11:00:00. Drift = 0.043.
Dates later than import data for this station already exist!
Last reading in database for well pw02a 20160801 was on 2017-03-08 09:00:00.
        The first reading from the raw file is on 2016-05-02 17:00:00. Drift = -0.024.
Dates later than import data for this station already exist!
Last reading in database for well pw02a 20170308 was on 2017-03-08 09:00:00.
        The first reading from the raw file is on 2016-08-01 17:00:00. Drift = -0.267.
Dates later than import data for this station already exist!
Last reading in database for well pw02b 20160801 was on 2017-03-08 09:00:00.
        The first reading from the raw file is on 2016-05-02 01:00:00. Drift = 0.01.
Dates later than import data for this station already exist!
Last reading in database for well pw02b 20170308 was on 2017-03-08 09:00:00.
        The first reading from the raw file is on 2016-08-01 17:00:00. Drift = 0.067.
Dates later than import data for this station already exist!
---------------------------------------------------------------------------
FileNotFoundError                         Traceback (most recent call last)
<ipython-input-11-0b9a31aaf7f8> in <module>()
      9 for ind in welltest:
     10     # import well file
---> 11     df, man, be, drift = ll.imp_well(well_table,ind,manual,baro_out)
     12 
     13     # plot data

C:\Program Files\ArcGIS\Pro\bin\Python\envs\arcgispro-py3\lib\site-packages\loggerloader\transport.py in imp_well(well_table, ind, manual, baro_out, gw_reading_table)
    965     import arcpy
    966     # import well file
--> 967     well = new_trans_imp(full_filepath, xle=(trans_type == 'Solinst'))
    968 
    969     # remove barometric pressure

C:\Program Files\ArcGIS\Pro\bin\Python\envs\arcgispro-py3\lib\site-packages\loggerloader\transport.py in new_trans_imp(infile, xle)
    665     """
    666     if xle:
--> 667         well = new_xle_imp(infile)
    668     else:
    669         well = new_csv_imp(infile)

C:\Program Files\ArcGIS\Pro\bin\Python\envs\arcgispro-py3\lib\site-packages\loggerloader\transport.py in new_xle_imp(infile)
    539     """
    540     # open text file
--> 541     with open(infile, "rb") as f:
    542         obj = xmltodict.parse(f, xml_attribs=True, encoding="ISO-8859-1")
    543     # navigate through xml to the data

FileNotFoundError: [Errno 2] No such file or directory: 'M:/PROJECTS/Snake Valley Water/Transducer Data/Raw_data_archive/2017/pw03 baro 20160803.xle'

In [12]:
pdf_pages.savefig(fig)
plt.close()



    
pdf_pages.close()
print("DONE!")
print("Files in "+ folder+'\\wells.pdf')


DONE!
Files in M:/PROJECTS/Snake Valley Water/Transducer Data/Raw_data_archive/2017/\wells.pdf

In [ ]:
def baro_eff(df, bp, wl, lag=200):
    import statsmodels.tsa.tsatools as tools
    df.dropna(inplace=True)
    dwl = df[wl].diff().values[1:-1]
    dbp = df[bp].diff().values[1:-1]
    # dwl = df[wl].values[1:-1]
    # dbp = df[bp].values[1:-1]
    df['j_dates'] = df.index.to_julian_date()
    lag_time = df['j_dates'].diff().cumsum().values[1:-1]
    df.drop('j_dates', axis=1, inplace=True)
    # Calculate BP Response Function

    ## create lag matrix for regression
    bpmat = tools.lagmat(dbp, lag, original='in')
    ## transpose matrix to determine required length
    ## run least squared regression
    sqrd = np.linalg.lstsq(bpmat, dwl)
    wlls = sqrd[0]
    cumls = np.cumsum(wlls)
    negcumls = [-1 * cumls[i] for i in range(len(cumls))]
    ymod = np.dot(bpmat, wlls)

    ## resid gives the residual of the bp
    resid = [(dwl[i] - ymod[i]) for i in range(len(dwl))]
    lag_trim = lag_time[0:len(cumls)]
    return negcumls, cumls, ymod, resid, lag_time, dwl, dbp, wlls

In [ ]:
baro_eff(df,'barometer','Level')

In [ ]:
# plot data
wl = 'cor2'
y1 = df[wl].values
y2 = df['barometer'].values
x1 = df.index.values
x2 = df.index.values

x4 = man.index
y4 = man['Meas_GW_Elev']
fig, ax1 = plt.subplots()
ax1.scatter(x4,y4,color='purple')
ax1.plot(x1,y1,color='blue',label='Water Level Elevation')
ax1.set_ylabel('Water Level Elevation',color='blue')
ax1.set_ylim(min(df[wl]),max(df[wl]))
y_formatter = tick.ScalarFormatter(useOffset=False)
ax1.yaxis.set_major_formatter(y_formatter)
ax2 = ax1.twinx()
ax2.set_ylabel('Barometric Pressure (ft)', color='red') 
ax2.plot(x2,y2,color='red',label='Barometric pressure (ft)')
h1, l1 = ax1.get_legend_handles_labels()
h2, l2 = ax2.get_legend_handles_labels()
ax1.legend(h1+h2, l1+l2, loc=3)
plt.xlim(df.first_valid_index()-datetime.timedelta(days=3),df.last_valid_index()+datetime.timedelta(days=3))
plt.title('Well: {:}  Drift: {:}  Baro. Eff.: {:}'.format(ind,drift,be))

In [ ]:
df['cor2'] = df[['Level', 'barometer']].\
        apply(lambda x: x[0] + 0.25 * (x[1]), 1)

In [ ]:
df['barometer'][0]

In [ ]:
plt.scatter(df['Level'].diff(), df['barometer'].diff())
import statsmodels.api as sm
df['dbp'] = df['barometer'].diff()
df['dwl'] = df['corrwl'].diff()
df1 = df.dropna(subset=['dbp','dwl'])
x = df1['dbp']
y = df1['dwl']
X = sm.add_constant(x)
model = sm.OLS(y, X).fit()
    # y_reg = [data.ix[i,'Sbp']*m+b for i in range(len(data['Sbp']))]
b = model.params[0]
m = model.params[1]
r = model.rsquared
print(m,r)

In [ ]:
be, intc, r = ll.clarks(df[500:600],'barometer','corrwl')
print(be,intc,r)

Barometric Pressure


In [ ]:
#baro_twin = lgld.compilation("H:/SnakeValley/Wetlands/Baro/1044788*")
baro_leland = lgld.compilation("H:/SnakeValley/Wetlands/Baro/*1044779*")

In [ ]:
baro_twin = lgld.compilation("H:/SnakeValley/Wetlands/Baro/*1044788*")

In [ ]:
xlefiles = xle_head_table("H:/SnakeValley/Wetlands/Baro/")

In [ ]:
#xlefiles.index = xlefiles.index.droplevel(level=1)
xlefiles.index.name = 'filename'

In [ ]:
pw03baro_append = folder + "\\pw03 baro 2016-08-03.xle"
pw10baro_append = folder + "\\pw10 baro 2016-08-03.xle"
pw19baro_append = folder + "\\pw19 baro 2016-08-04.xle"

In [ ]:
pw03baro = raw_archive_folder + "\\pw03baro.csv"
pw10baro = raw_archive_folder + "\\pw10baro.csv"
pw19baro = raw_archive_folder +  "\\pw19baro.csv"

In [ ]:
wa.appendomatic(pw03baro_append,pw03baro)
wa.appendomatic(pw10baro_append,pw10baro)
wa.appendomatic(pw19baro_append,pw19baro)

In [ ]:
# duplicated to update changes made by appendomatic
pw03baro = pd.read_csv(raw_archive_folder + "\\pw03baro.csv",index_col='DateTime',parse_dates=True)
pw10baro = pd.read_csv(raw_archive_folder + "\\pw10baro.csv",index_col='DateTime',parse_dates=True)
pw19baro = pd.read_csv(raw_archive_folder + "\\pw19baro.csv",index_col='DateTime',parse_dates=True)

In [ ]:
pw03baro = lgld.hourly_resample(pw03baro)
pw10baro = lgld.hourly_resample(pw10baro)
pw19baro = lgld.hourly_resample(pw19baro)

In [ ]:
baro = pd.merge(pw03baro,pw10baro,how="outer",left_index=True,right_index=True)
baro = pd.merge(baro,pw19baro,how="outer",left_index=True,right_index=True)
baro.dropna(axis=0,inplace=True)
baro['integr'] = 0 #for vented transducers

In [ ]:
baro[['pw03','pw10','pw19']].plot()

In [ ]:
pw03baro['Temperature'].plot()

In [ ]:


In [ ]:


In [ ]:


In [ ]:
barodict = {"PW10 Barometer":9027, "PW19 Barometer":9049, "SG25 Barometer":9061, 
            "Leland-Harris Barometer":9025, "Twin Springs Barometer":9024, "PW03 Barometer":9003}
bpdict = {'pw03':'9003','pw10':'9027','pw19':'9049','twin':'9024','leland':'9025'}

In [ ]:


In [ ]:
baro.to_csv(raw_archive_folder + '\\baro.csv')

In [ ]:
'ReadingID',
 'WellID',
 'DateTime',
 'MeasuredLevel',
 'Temp',

Water Level Tranducer Data

Export and Plot Data

Export Manual Data


In [ ]:
manualwls = raw_archive_folder + "\\All tape measurements.csv"
manual = pd.read_csv(manualwls, index_col="DateTime", engine="python")
manualrecent = manual[manual.index.to_datetime() > pd.datetime(2015,6,1)]
manualrecent.dropna(inplace=True)
manualrecent.reset_index(inplace=True)
print manualrecent.dtypes
manualrecent = pd.merge(manualrecent, wellinfo, how='left',left_on='WellID', right_index=True)
manualrecent.loc[:,"MeasuredLevel"] = np.nan
manualrecent.loc[:,"Temp"] = np.nan
manualrecent.loc[:,"BaroEfficiencyCorrected"] = np.nan 
manualrecent.loc[:,"DeltaLevel"] = np.nan
manualrecent.loc[:,"DriftCorrection"] = np.nan
manualrecent.loc[:,"MeasuredBy"] = np.nan
manualrecent.loc[:,"Tape"] = 1
manualrecent.loc[:,"DTWBelowGroundSurface"] = np.nan
manualrecent.loc[:,"WaterElevation"] = np.nan
#manualrecent["DTWBelowGroundSurface"] = np.nan
manualrecent.loc[:,"DTWBelowCasing"] = manualrecent.loc[:,"MeasuredDTW"]
manualrecent.loc[:,"DTWBelowGroundSurface"] = manualrecent.loc[:,"MeasuredDTW"] - manualrecent.loc[:,"Offset"]
manualrecent.loc[:,"WaterElevation"] = manualrecent.loc[:,'GroundElevation'] - manualrecent.loc[:,"DTWBelowGroundSurface"]
print manualrecent

#outpath = pathlist[0] + '\\' + pathlist[1] + '\\' + pathlist[2] + '\\' + pathlist[3] + '\\' + pathlist[4] + '\\' + 'Manual' + '.csv'  
manualrecent.to_csv(raw_archive_folder+ 'Manual' + '.csv', index=True, columns= ["WellID","DateTime","MeasuredLevel","Temp","BaroEfficiencyCorrected","DeltaLevel",
                                             "MeasuredDTW","DriftCorrection","DTWBelowCasing","DTWBelowGroundSurface",
                                             "WaterElevation","Tape","MeasuredBy"])

In [ ]:
manual['DateTime'] = manual.index.to_datetime()
manual.to_csv(raw_archive_folder+ 'Manual' + '.csv', index=False, columns= ["WellID","DateTime","MeasuredLevel","Temp","BaroEfficiencyCorrected","DeltaLevel",
                                             "MeasuredDTW","DriftCorrection","DTWBelowCasing","DTWBelowGroundSurface",
                                             "WaterElevation","Tape","MeasuredBy"])

List Files to Export


In [ ]:
print wellinfo.loc[:,'full_file_name']

Files To Database


In [ ]:
manualwls = raw_archive_folder + '/All tape measurements.csv'
manual = pd.read_csv(manualwls, index_col="DateTime", engine="python")
barofile = raw_archive_folder + '/baro.csv'
baro = pd.read_csv(barofile,index_col=0, parse_dates=True)
wellinfo = pd.read_csv(folder + '/wellinfo4.csv')

In [ ]:
engine = engineGetter.getEngine()

Add Select Wells


In [ ]:
form = widgets.VBox()
welllist = ["pw07mx 2015-11-30.csv","pw02a 11-30-2015.xle","pw02a 2016-07-07.xle","sg23b 2016-05-02.xle"]
welllist = welllist+ list(wellinfo.loc[:,'full_file_name'].values)
#print welllist

wells = widgets.SelectMultiple(description="Well", options=welllist, padding=4)
pdfName = widgets.Text(description="PDF:",padding=4)
driftTol = widgets.FloatSlider(value=0.05, min=0.00, max=10.0, step=0.05, description='Drift Tolerance:')
form.children = [wells, pdfName, driftTol]
display(form)

In [ ]:
for i in wells.value:
    print folder+'/'+i
    inputfile = folder +'/'+i
    g, drift, wellname = svdi.imp_new_well(inputfile, wellinfo, manual, baro)
    glist = g.columns.tolist()
    y1 = g['WaterElevation'].values
    y2 = baro['pw03'].values
    x1 = g.index.values
    x2 = baro.index.values
    wellname, wellid = svdi.getwellid(folder+'\\'+i,wellinfo)
    ylast = wellinfo[wellinfo['WellID']==wellid]['GroundElevation'].values[0] + wellinfo[wellinfo['WellID']==wellid]['Offset'].values[0] - svdi.fcl(manual[manual['WellID']== wellid],max(g.index.to_datetime()))[1]
    yfirst = wellinfo[wellinfo['WellID']==wellid]['GroundElevation'].values[0] + wellinfo[wellinfo['WellID']==wellid]['Offset'].values[0] - svdi.fcl(manual[manual['WellID']== wellid],min(g.index.to_datetime()))[1]
    xlast = pd.to_datetime((svdi.fcl(manual[manual['WellID']== wellid],max(pd.to_datetime(g.index)))).name)
    xfirst = pd.to_datetime((svdi.fcl(manual[manual['WellID']== wellid],min(pd.to_datetime(g.index)))).name)
    x4 = [xfirst,xlast]
    y4 = [yfirst,ylast]
    fig, ax1 = plt.subplots()
    ax1.scatter(x4,y4,color='purple')
    ax1.plot(x1,y1,color='blue',label='Water Level Elevation')
    ax1.set_ylabel('Water Level Elevation',color='blue')
    y_formatter = tick.ScalarFormatter(useOffset=False)
    ax1.yaxis.set_major_formatter(y_formatter)
    ax2 = ax1.twinx()
    ax2.set_ylabel('Barometric Pressure (ft)', color='red') 
    ax2.plot(x2,y2,color='red',label='Barometric pressure (ft)')
    h1, l1 = ax1.get_legend_handles_labels()
    h2, l2 = ax2.get_legend_handles_labels()
    ax1.legend(h1+h2, l1+l2, loc=3)
    plt.xlim(xfirst-timedelta(days=3),xlast+timedelta(days=3))
    plt.title('Well: ' + wellname.title() + '  ' + 'Total Drift = ' + str(g['DriftCorrection'][-1]))

In [ ]:
env.workspace = "C:/Users/PAULINKENBRANDT/AppData/Roaming/ESRI/Desktop10.4/ArcCatalog/UEMP_Dev.sde"
read_table = "UEMP_Dev.UEMPADMIN.GW_reading"

arcpy.env.overwriteOutput=True
edit = arcpy.da.Editor(env.workspace)
edit.startEditing(False, True)
edit.startOperation()

In [ ]:
pdf_pages = PdfPages(folder + '/' + pdfName.value + '.pdf')
for i in wells.value:
    print folder+'/'+i
    inputfile = folder +'/'+i
    g, drift, wellname = svdi.imp_new_well(inputfile, wellinfo, manual, baro)
    quer = "SELECT * FROM groundwater.reading where WellID = " + str(g['WellID'].values[0]) + " and DateTime > \'" + str(g.index.values[-1])[0:10] + " "+ str(g.index.values[-1])[11:19] + "\'"

    if abs(float(drift)) < driftTol.value:
        if len(pd.read_sql_query(sql=quer,con=engine))<1:
            g.to_csv(enteredFolder+wellname+".csv", index=False)
            tablename = 'reading'
            g.to_sql(con=engine, name = tablename, if_exists='append', flavor='mysql', index=False)
            print("Added to DB table " + tablename)
        else:
            print("Already Entered")
            print(len(pd.read_sql_query(sql=quer,con=engine)))
    else:
        g.to_csv(checkFolder+wellname+".csv", index=False, columns= ["WellID","DateTime","MeasuredLevel","Temp","BaroEfficiencyCorrected","DeltaLevel",
                                             "MeasuredDTW","DriftCorrection","DTWBelowCasing","DTWBelowGroundSurface",
                                             "WaterElevation","Tape","MeasuredBy"])
        print("Check File")
    glist = g.columns.tolist()
    for j in range(len(glist)):
        if 'pw' in glist[j]:
            h = glist[j]
    y1 = g['WaterElevation'].values
    y2 = baro['pw03'].values
    x1 = g.index.values
    x2 = baro.index.values
    wellname, wellid = svdi.getwellid(folder+'\\'+i,wellinfo)
    ylast = wellinfo[wellinfo['WellID']==wellid]['GroundElevation'].values[0] + wellinfo[wellinfo['WellID']==wellid]['Offset'].values[0] - svdi.fcl(manual[manual['WellID']== wellid],max(pd.to_datetime(g.index)))[1]
    yfirst = wellinfo[wellinfo['WellID']==wellid]['GroundElevation'].values[0] + wellinfo[wellinfo['WellID']==wellid]['Offset'].values[0] - svdi.fcl(manual[manual['WellID']== wellid],min(pd.to_datetime(g.index)))[1]
    xlast = pd.to_datetime(svdi.fcl(manual[manual['WellID']== wellid],max(pd.to_datetime(g.index))).name)
    xfirst = pd.to_datetime(svdi.fcl(manual[manual['WellID']== wellid],min(pd.to_datetime(g.index))).name)
    x4 = [xfirst,xlast]
    y4 = [yfirst,ylast]
    fig, ax1 = plt.subplots()
    ax1.scatter(x4,y4,color='purple')
    ax1.plot(x1,y1,color='blue',label='Water Level Elevation')
    ax1.set_ylabel('Water Level Elevation',color='blue')
    y_formatter = tick.ScalarFormatter(useOffset=False)
    ax1.yaxis.set_major_formatter(y_formatter)
    ax2 = ax1.twinx()
    ax2.set_ylabel('Barometric Pressure (ft)', color='red') 
    ax2.plot(x2,y2,color='red',label='Barometric pressure (ft)')
    h1, l1 = ax1.get_legend_handles_labels()
    h2, l2 = ax2.get_legend_handles_labels()
    ax1.legend(h1+h2, l1+l2, loc=3)
    plt.xlim(xfirst-timedelta(days=3),xlast+timedelta(days=3))
    plt.title('Well: ' + wellname.title() + '  ' + 'Total Drift = ' + str(g['DriftCorrection'][-1]))
    
    pdf_pages.savefig(fig)
    plt.close()
pdf_pages.close()
print("DONE!")
print("Files in "+ folder+'\\wells.pdf')
print(datetime.now())

In [ ]:
form = widgets.VBox()
driftTol = widgets.FloatSlider(value=0.05, min=0.00, max=10.0, step=0.05, description='Drift Tolerance:')
form.children = [driftTol]
display(form)

In [ ]:
pdf_pages = PdfPages(folder + '/wells.pdf')
for i in wellinfo.loc[:,'full_file_name']:
    print folder+'/'+i
    inputfile = folder +'/'+i
    g, drift, wellname = svdi.imp_new_well(inputfile, wellinfo, manual, baro)
    quer = "SELECT * FROM groundwater.reading where WellID = " + str(g['WellID'].values[0]) + " and (DateTime >= \'" + str(g.index.values[-1])[0:10] + "\')"

    if abs(float(drift)) < driftTol.value:
        if len(pd.read_sql_query(sql=quer,con=engine))<1:
            g.to_csv(enteredFolder + wellname+".csv", index=False)
            g.to_sql(con=engine, name='reading', if_exists='append', flavor='mysql', index=False)
            print("Added to DB")
        else:
            print("Already Entered")
    else:
        g.to_csv(checkFolder + wellname+".csv", index=False, columns= ["WellID","DateTime","MeasuredLevel","Temp","BaroEfficiencyCorrected","DeltaLevel",
                                             "MeasuredDTW","DriftCorrection","DTWBelowCasing","DTWBelowGroundSurface",
                                             "WaterElevation","Tape","MeasuredBy"])
        print("Check File")
    glist = g.columns.tolist()
    for j in range(len(glist)):
        if 'pw' in glist[j]:
            h = glist[j]
    y1 = g['WaterElevation'].values
    y2 = baro['pw03'].values
    x1 = g.index.values
    x2 = baro.index.values
    wellname, wellid = svdi.getwellid(folder+'\\'+i,wellinfo)
    ylast = wellinfo[wellinfo['WellID']==wellid]['GroundElevation'].values[0] + wellinfo[wellinfo['WellID']==wellid]['Offset'].values[0] - svdi.fcl(manual[manual['WellID']== wellid],max(pd.to_datetime(g.index)))[1]
    yfirst = wellinfo[wellinfo['WellID']==wellid]['GroundElevation'].values[0] + wellinfo[wellinfo['WellID']==wellid]['Offset'].values[0] - svdi.fcl(manual[manual['WellID']== wellid],min(pd.to_datetime(g.index)))[1]
    xlast = pd.to_datetime(svdi.fcl(manual[manual['WellID']== wellid],max(pd.to_datetime(g.index))).name)
    xfirst = pd.to_datetime(svdi.fcl(manual[manual['WellID']== wellid],min(pd.to_datetime(g.index))).name)
    x4 = [xfirst,xlast]
    y4 = [yfirst,ylast]
    fig, ax1 = plt.subplots()
    ax1.scatter(x4,y4,color='purple')
    ax1.plot(x1,y1,color='blue',label='Water Level Elevation')
    ax1.set_ylabel('Water Level Elevation',color='blue')
    y_formatter = tick.ScalarFormatter(useOffset=False)
    ax1.yaxis.set_major_formatter(y_formatter)
    ax2 = ax1.twinx()
    ax2.set_ylabel('Barometric Pressure (ft)', color='red') 
    ax2.plot(x2,y2,color='red',label='Barometric pressure (ft)')
    h1, l1 = ax1.get_legend_handles_labels()
    h2, l2 = ax2.get_legend_handles_labels()
    ax1.legend(h1+h2, l1+l2, loc=3)
    plt.xlim(min(pd.to_datetime(g.index))-timedelta(days=3),max(pd.to_datetime(g.index))+timedelta(days=3))
    plt.title('Well: ' + wellname.title() + '  ' + 'Total Drift = ' + str(g['DriftCorrection'][-1]))
    
    pdf_pages.savefig(fig)
    plt.close()
pdf_pages.close()
print("DONE!")
print("Files in "+ folder+'\\wells.pdf')