In [1]:
import pandas as pd
from glob import glob
from simpledbf import Dbf5
import os
import pyproj
import numpy as np
import geopandas as gpd
import warnings
In [2]:
def dist_pyth(lon1, lat1, lon2, lat2):
    """Calculate the Euclidean distance between two points."""
    return np.sqrt((lat2 - lat1)**2 + (lon2 - lon1)**2)
def haversine(lon1, lat1, lon2, lat2):
    """
    Calculate the great circle distance between one point
    on the earth and an array of points (specified in decimal degrees).
    """
    # convert decimal degrees to radians
    lon1, lat1, lon2, lat2 = map(np.radians, [lon1, lat1, lon2, lat2])
    # haversine formula
    dlon = lon2 - lon1
    dlat = lat2 - lat1
    a = np.sin(dlat/2)**2 + np.cos(lat1) * np.cos(lat2) * np.sin(dlon/2)**2
    c = 2 * np.arcsin(np.sqrt(a))
    r = 6371000  # mean radius of the earth in meters
    return c * r
def p_dist(lon1, lat1, lon2, lat2, units=None):
    """
    Calculate the distance in meters between two *CLOSE* points,
    using Pythagoras for metric coordinates ('m') and the haversine
    formula for decimal degrees ('deg').
    """
    if units == 'm':
        dist = dist_pyth(lon1, lat1, lon2, lat2)
    elif units == 'deg':
        dist = haversine(lon1, lat1, lon2, lat2)
    else:
        raise ValueError('Units must be meters (m) or degrees (deg).')
    return dist
def p_thin(df, xcol='x', ycol='y', datacols='data', radius=10, method='nanmean', units=None):
    """
    Thin a pandas point DataFrame based on the distance between the points.

    Parameters
    ----------
    df: DataFrame with the point data
    xcol: name of the column with x coordinates
    ycol: name of the column with y coordinates
    datacols: list of names of the columns with data
    radius: search radius (meters) for the point distance
    method: aggregation method (a numpy function name). Default: 'nanmean'
    units: units of the coordinates, 'm' (meters) or 'deg' (decimal degrees)

    Returns
    -------
    A thinned DataFrame
    """
    assert units is not None, 'Units must be meters (m) or degrees (deg).'
    thinned = pd.DataFrame([], columns=df.columns.values)
    print(len(df))
    ct = 0
    while len(df) > 0:
        # distance of all remaining points to the first point
        df['DIST'] = p_dist(df[xcol].iloc[0], df[ycol].iloc[0], df[xcol], df[ycol], units=units)
        df = df.sort_values('DIST')
        subset = df[df.DIST <= radius]
        if not subset.empty:
            subset = subset.reset_index()
            fill_dict = {}
            # take the coordinates of the (distance-sorted) median point
            fill_dict[xcol] = subset.loc[int(len(subset)/2), xcol]
            fill_dict[ycol] = subset.loc[int(len(subset)/2), ycol]
            for data in datacols:
                try:
                    fill_dict[data] = getattr(np, method)(subset[data].values)
                except TypeError:
                    fill_dict[data] = np.nan
            thinned.loc[len(thinned)] = pd.Series(fill_dict)
        ct += 1
        if ct % 100 == 0:
            print(ct, len(df))
        # delete the used points from the data
        df = df.drop(df[df.DIST <= radius].index.values)
    print(len(thinned))
    return thinned
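A quick sanity check of p_thin on synthetic points (all values below are made up for illustration): two tight clusters roughly 1 km apart should collapse to two averaged rows when thinned with a 10 m search radius.

# sanity check of p_thin on synthetic data (coordinates and thicknesses are made up)
demo = pd.DataFrame({'x': [170.0000, 170.00005, 170.0100, 170.01005],
                     'y': [-44.0000, -44.00005, -44.0100, -44.01005],
                     'thick': [100., 110., 200., 220.]})
demo_thinned = p_thin(demo, xcol='x', ycol='y', datacols=['thick'],
                      radius=10, method='nanmean', units='deg')
# demo_thinned contains two rows with thick values of 105 and 210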
In [3]:
templ_path = 'C:\\Users\\jlandman\\Desktop\\GlaThiDa_2-0_DataSubmissionForm_for_pandas.xls'
templ_t = pd.read_excel(templ_path, sheet_name='T - GLACIER THICKNESS OVERVIEW')
templ_tt = pd.read_excel(templ_path, sheet_name='TT - GL. THICKNESS DISTRIBUTION')
templ_ttt = pd.read_excel(templ_path, sheet_name='TTT - GL. THICKNESS POINT DATA')
In [4]:
gtd_ids = {'brewster': 2077,
           'north': 2078,
           'south': 2079,
           'starbuck_gprt': 2080,
           'starbuck_icebr': 2081,
           'tasman': 2082,
           'urumqui': 2083,  # spelling kept to match the data directory name
           'west_washmawapta': 2084}
In [52]:
nzgd = pyproj.Proj(init='epsg:2193') # NZGD2000_New_Zealand_Transverse_Mercator_2000
latlon = pyproj.Proj(init='epsg:4326') #WGS84 lat/lon
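A quick check that the projection pair is set up the right way round (the NZTM coordinate below is a hypothetical point near Brewster Glacier, for illustration only):

# forward check: NZTM easting/northing -> WGS84 lon/lat
lon_chk, lat_chk = pyproj.transform(nzgd, latlon, 1313000, 5123000)
# expect roughly (169.4, -44.0)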
In [53]:
# constants
brew = {'punit': 'NZ',
'gname': 'BREWSTER',
'src_id':'',
'src_g_id':'',
'survey_date': '19979999',
'dem_date': '19860399',
'gid': gtd_ids['brewster'],
'lat': -44.07,
'lon': 169.43,
'area': 2.62,
'mean_sl': 15,
'mean_th': np.nan,
'mean_th_unc': np.nan,
'max_th': np.nan,
'max_th_unc': np.nan,
'surv_meth': 'GPRt',
'surv_meth_det': '',
'no_points': 588,
'no_prfls': 4,
'length_prfls': np.nan,
'interp_meth':'',
'investig':'Bob JACOBEL and Ian OWENS; compiled by Brian ANDERSON',
'spons_ag':'',
'ref':'Willis, I. et al. (2009). Hydrological Processes, 23(3), p.384-396. DOI: 10.1002/hyp.7146',
'dflag':np.nan,
'remarks_t':'Source_ID: Outline and DEM from aerial photography for the NZMS260 mapping series flown in 3/1986. Lat/lon from WGMS FoG2015. Data unpublished, but used in given reference. Mean slope calculated from DEM by WGMS.',
'remarks_ttt':''
}
In [55]:
brewster_path = 'C:\\Users\\jlandman\\Desktop\\newData\\WG_Farinotti_data_package\\brewster\\brewster_jacobel_owens_bed_xyz.csv'
# read original data
brew_dat = pd.read_csv(brewster_path)
# convert NZTM to WGS84 lat/lon
xs = brew_dat['nztm_easting (m)'].values
ys = brew_dat[' nztm_northing (m)'].values
lons, lats = pyproj.transform(nzgd, latlon, xs, ys)
# fill T table
brew_t = templ_t.copy()
brew_t.loc[0,'GlaThiDa_ID'] = brew['gid']
brew_t.loc[0,'POLITICAL_UNIT'] = brew['punit']
brew_t.loc[0,'GLACIER_NAME'] = brew['gname']
brew_t.loc[0,'SOURCE_ID'] = brew['src_id']
brew_t.loc[0,'GLACIER_ID'] = brew['src_g_id']
brew_t.loc[0,'LAT'] = brew['lat']
brew_t.loc[0,'LON'] = brew['lon']
brew_t.loc[0,'SURVEY_DATE'] = brew['survey_date']
brew_t.loc[0,'DEM_DATE'] = brew['dem_date']
brew_t.loc[0,'AREA'] = brew['area']
brew_t.loc[0,'MEAN_SLOPE'] = brew['mean_sl']
brew_t.loc[0,'MEAN_THICKNESS'] = brew['mean_th']
brew_t.loc[0,'MEAN_THICKNESS_UNCERTAINTY'] = brew['mean_th_unc']
brew_t.loc[0,'MAXIMUM_THICKNESS'] = brew['max_th']
brew_t.loc[0,'MAX_THICKNESS_UNCERTAINTY'] = brew['max_th_unc']
brew_t.loc[0,'SURVEY_METHOD'] = brew['surv_meth']
brew_t.loc[0,'SURVEY_METHOD_DETAILS'] = brew['surv_meth_det']
brew_t.loc[0,'NUMBER_OF_SURVEY_POINTS'] = brew['no_points']
brew_t.loc[0,'NUMBER_OF_SURVEY_PROFILES'] = brew['no_prfls']
brew_t.loc[0,'TOTAL_LENGTH_OF_SURVEY_PROFILES'] = brew['length_prfls']
brew_t.loc[0,'INTERPOLATION_METHOD'] = brew['interp_meth']
brew_t.loc[0,'INVESTIGATOR'] = brew['investig']
brew_t.loc[0,'SPONSORING_AGENCY'] = brew['spons_ag']
brew_t.loc[0,'REFERENCES'] = brew['ref']
brew_t.loc[0,'DATA_FLAG'] = brew['dflag']
brew_t.loc[0,'REMARKS'] = brew['remarks_t']
# fill TTT table
brew_ttt = templ_ttt.copy()
brew_ttt['POINT_ID'] = range(1,len(brew_dat)+1)
brew_ttt['POINT_LAT'] = lats
brew_ttt['POINT_LON'] = lons
brew_ttt['ELEVATION'] = brew_dat[' surface_elevation (m asl)'].round()
brew_ttt['THICKNESS'] = brew_dat[' ice_thickness (m)'].round()
brew_ttt['THICKNESS_UNCERTAINTY'] = np.nan
brew_ttt['GlaThiDa_ID'] = brew['gid']
brew_ttt['POLITICAL_UNIT'] = brew['punit']
brew_ttt['GLACIER_NAME'] = brew['gname']
brew_ttt['SURVEY_DATE'] = brew['survey_date']
brew_ttt['DATA_FLAG'] = brew['dflag']
brew_ttt['REMARKS'] = brew['remarks_ttt']
brew_t = brew_t[['GlaThiDa_ID', 'POLITICAL_UNIT', 'GLACIER_NAME', 'SOURCE_ID', 'GLACIER_ID', 'LAT', 'LON', 'SURVEY_DATE',
'DEM_DATE', 'AREA', 'MEAN_SLOPE', 'MEAN_THICKNESS', 'MEAN_THICKNESS_UNCERTAINTY', 'MAXIMUM_THICKNESS',
'MAX_THICKNESS_UNCERTAINTY', 'SURVEY_METHOD', 'SURVEY_METHOD_DETAILS', 'NUMBER_OF_SURVEY_POINTS',
'NUMBER_OF_SURVEY_PROFILES', 'TOTAL_LENGTH_OF_SURVEY_PROFILES', 'INTERPOLATION_METHOD', 'INVESTIGATOR',
'SPONSORING_AGENCY', 'REFERENCES', 'DATA_FLAG', 'REMARKS']]
brew_ttt = brew_ttt[['GlaThiDa_ID','POLITICAL_UNIT','GLACIER_NAME','SURVEY_DATE','POINT_ID','POINT_LAT','POINT_LON',
'ELEVATION','THICKNESS','THICKNESS_UNCERTAINTY','DATA_FLAG','REMARKS']]
brew_t.to_excel(os.path.splitext(brewster_path)[0]+'_Johannes_T.xls', index=False)
brew_ttt.to_excel(os.path.splitext(brewster_path)[0]+'_Johannes_TTT.xls', index=False)
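The 26 column assignments above recur verbatim for every glacier below; only the constants dict changes. A small helper could replace each block. T_KEYMAP and fill_t are a sketch, not part of the original workflow:

# sketch: map T-table columns to the keys of the per-glacier constants dicts
T_KEYMAP = {'GlaThiDa_ID': 'gid', 'POLITICAL_UNIT': 'punit', 'GLACIER_NAME': 'gname',
            'SOURCE_ID': 'src_id', 'GLACIER_ID': 'src_g_id', 'LAT': 'lat', 'LON': 'lon',
            'SURVEY_DATE': 'survey_date', 'DEM_DATE': 'dem_date', 'AREA': 'area',
            'MEAN_SLOPE': 'mean_sl', 'MEAN_THICKNESS': 'mean_th',
            'MEAN_THICKNESS_UNCERTAINTY': 'mean_th_unc', 'MAXIMUM_THICKNESS': 'max_th',
            'MAX_THICKNESS_UNCERTAINTY': 'max_th_unc', 'SURVEY_METHOD': 'surv_meth',
            'SURVEY_METHOD_DETAILS': 'surv_meth_det', 'NUMBER_OF_SURVEY_POINTS': 'no_points',
            'NUMBER_OF_SURVEY_PROFILES': 'no_prfls',
            'TOTAL_LENGTH_OF_SURVEY_PROFILES': 'length_prfls',
            'INTERPOLATION_METHOD': 'interp_meth', 'INVESTIGATOR': 'investig',
            'SPONSORING_AGENCY': 'spons_ag', 'REFERENCES': 'ref', 'DATA_FLAG': 'dflag',
            'REMARKS': 'remarks_t'}

def fill_t(template, const):
    """Fill row 0 of a copy of the T-table template from a constants dict."""
    t = template.copy()
    for col, key in T_KEYMAP.items():
        t.loc[0, col] = const[key]
    return t

# usage, equivalent to the block above: brew_t = fill_t(templ_t, brew)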
In [8]:
utm7n = pyproj.Proj(init='epsg:32607') # UTM 7N (WGS84)
latlon = pyproj.Proj(init='epsg:4326') #WGS84 lat/lon
In [30]:
# constants
north = {'punit': 'CA',
'gname': 'NORTH',
'src_id':'',
'src_g_id':'',
'survey_date': '20119999',
'dem_date': '20079999',
'gid': gtd_ids['north'],
'lat': 60.911031,
'lon': -139.150803,
'area': 6.9,
'mean_sl': np.nan,
'mean_th': np.nan,
'mean_th_unc': np.nan,
'max_th': np.nan,
'max_th_unc': np.nan,
'surv_meth': 'OTH',
'surv_meth_det': 'Radar sounding data at center frequencies of 10, 35, and 50MHz were collected using the ice-penetrating radar system described by Mingo and Flowers [2010].',
'no_points': 7277,
'no_prfls': np.nan,
'length_prfls': np.nan,
'interp_meth':'KRG',
'investig':'Nat WILSON, Glenn FLOWERS, Laurent MINGO',
'spons_ag':'',
'ref':'Wilson, N. J., Flowers, G. E., & Mingo, L. (2013). Journal of Geophysical Research: Earth Surface, 118(3), pp.1443-1459. DOI:10.1002/jgrf.20096',
'dflag':np.nan,
'remarks_t':'Lat/lon from Google Earth. Survey methods: GPRt and DRIh',
'remarks_ttt':''
}
#thin_value_north = 7
search_dist_north = 10  # search radius in meters (haversine distance, since coordinates are in degrees)
In [57]:
north_path = 'C:\\Users\\jlandman\\Desktop\\newData\\WG_Farinotti_data_package\\north_and_south_glacier\\depth_GL2_080911.xyz'
# read original data
north_dat = pd.read_csv(north_path, header=None, sep='\t') # 0:easting (m), 1:northing (m), 2:depth (m), 3:estimated picking error (m)
# convert UTM 7N to WGS84 lat/lon
xs = north_dat[0].values
ys = north_dat[1].values
lons, lats = pyproj.transform(utm7n, latlon, xs, ys)
# fill T table
north_t = templ_t.copy()
north_t.loc[0,'GlaThiDa_ID'] = north['gid']
north_t.loc[0,'POLITICAL_UNIT'] = north['punit']
north_t.loc[0,'GLACIER_NAME'] = north['gname']
north_t.loc[0,'SOURCE_ID'] = north['src_id']
north_t.loc[0,'GLACIER_ID'] = north['src_g_id']
north_t.loc[0,'LAT'] = north['lat']
north_t.loc[0,'LON'] = north['lon']
north_t.loc[0,'SURVEY_DATE'] = north['survey_date']
north_t.loc[0,'DEM_DATE'] = north['dem_date']
north_t.loc[0,'AREA'] = north['area']
north_t.loc[0,'MEAN_SLOPE'] = north['mean_sl']
north_t.loc[0,'MEAN_THICKNESS'] = north['mean_th']
north_t.loc[0,'MEAN_THICKNESS_UNCERTAINTY'] = north['mean_th_unc']
north_t.loc[0,'MAXIMUM_THICKNESS'] = north['max_th']
north_t.loc[0,'MAX_THICKNESS_UNCERTAINTY'] = north['max_th_unc']
north_t.loc[0,'SURVEY_METHOD'] = north['surv_meth']
north_t.loc[0,'SURVEY_METHOD_DETAILS'] = north['surv_meth_det']
north_t.loc[0,'NUMBER_OF_SURVEY_POINTS'] = north['no_points']
north_t.loc[0,'NUMBER_OF_SURVEY_PROFILES'] = north['no_prfls']
north_t.loc[0,'TOTAL_LENGTH_OF_SURVEY_PROFILES'] = north['length_prfls']
north_t.loc[0,'INTERPOLATION_METHOD'] = north['interp_meth']
north_t.loc[0,'INVESTIGATOR'] = north['investig']
north_t.loc[0,'SPONSORING_AGENCY'] = north['spons_ag']
north_t.loc[0,'REFERENCES'] = north['ref']
north_t.loc[0,'DATA_FLAG'] = north['dflag']
north_t.loc[0,'REMARKS'] = north['remarks_t']
# fill TTT table and thin data
north_ttt = templ_ttt.copy()
north_ttt['POINT_LAT'] = lats
north_ttt['POINT_LON'] = lons
north_ttt['ELEVATION'] = np.nan
north_ttt['THICKNESS'] = (north_dat[2]).astype(np.float64)
north_ttt['THICKNESS_UNCERTAINTY'] = (north_dat[3]).astype(np.float64)
# old point thinning
#north_ttt = north_ttt.groupby(north_ttt.index // thin_value_north).mean() # average over every x values as specified above
# thin the data and filter "mean of empty slice" warnings (in case there is no elevation)
with warnings.catch_warnings():
    warnings.simplefilter("ignore", category=RuntimeWarning)
    north_ttt = p_thin(north_ttt, xcol='POINT_LON', ycol='POINT_LAT',
                       datacols=['ELEVATION', 'THICKNESS', 'THICKNESS_UNCERTAINTY'],
                       radius=search_dist_north, method='nanmean', units='deg')
north_ttt['POINT_ID'] = range(1,len(north_ttt)+1)
north_ttt['GlaThiDa_ID'] = north['gid']
north_ttt['POLITICAL_UNIT'] = north['punit']
north_ttt['GLACIER_NAME'] = north['gname']
north_ttt['SURVEY_DATE'] = north['survey_date']
north_ttt['DATA_FLAG'] = north['dflag']
north_ttt['REMARKS'] = north['remarks_ttt'] + ' Point data have been thinned (mean) within a search distance of %s m.' % search_dist_north
# round again after thinning (the averaged values are no longer rounded)
north_ttt['THICKNESS'] = north_ttt['THICKNESS'].round()
north_ttt['THICKNESS_UNCERTAINTY'] = north_ttt['THICKNESS_UNCERTAINTY'].round(2)
north_t = north_t[['GlaThiDa_ID', 'POLITICAL_UNIT', 'GLACIER_NAME', 'SOURCE_ID', 'GLACIER_ID', 'LAT', 'LON', 'SURVEY_DATE',
'DEM_DATE', 'AREA', 'MEAN_SLOPE', 'MEAN_THICKNESS', 'MEAN_THICKNESS_UNCERTAINTY', 'MAXIMUM_THICKNESS',
'MAX_THICKNESS_UNCERTAINTY', 'SURVEY_METHOD', 'SURVEY_METHOD_DETAILS', 'NUMBER_OF_SURVEY_POINTS',
'NUMBER_OF_SURVEY_PROFILES', 'TOTAL_LENGTH_OF_SURVEY_PROFILES', 'INTERPOLATION_METHOD', 'INVESTIGATOR',
'SPONSORING_AGENCY', 'REFERENCES', 'DATA_FLAG', 'REMARKS']]
north_ttt = north_ttt[['GlaThiDa_ID','POLITICAL_UNIT','GLACIER_NAME','SURVEY_DATE','POINT_ID','POINT_LAT','POINT_LON',
'ELEVATION','THICKNESS','THICKNESS_UNCERTAINTY','DATA_FLAG','REMARKS']]
north_t.to_excel(os.path.splitext(north_path)[0]+'_Johannes_T.xls', index=False)
north_ttt.to_excel(os.path.splitext(north_path)[0]+'_Johannes_TTT.xls', index=False)
In [58]:
utm7n = pyproj.Proj(init='epsg:32607') # UTM 7N (WGS84)
latlon = pyproj.Proj(init='epsg:4326') #WGS84 lat/lon
In [59]:
# constants
south = {'punit': 'CA',
'gname': 'SOUTH',
'src_id':'',
'src_g_id':'',
'survey_date': '20119999',
'dem_date': '20079999',
'gid': gtd_ids['south'],
'lat': 60.817771,
'lon': -139.124004,
'area': 5.3,
'mean_sl': np.nan,
'mean_th': np.nan,
'mean_th_unc': np.nan,
'max_th': np.nan,
'max_th_unc': np.nan,
'surv_meth': 'OTH',
'surv_meth_det': 'Radar sounding data at center frequencies of 10, 35, and 50MHz were collected using the ice-penetrating radar system described by Mingo and Flowers [2010].',
'no_points': 9618,
'no_prfls': np.nan,
'length_prfls': np.nan,
'interp_meth':'KRG',
'investig':'Nat WILSON, Glenn FLOWERS, Laurent MINGO',
'spons_ag':'',
'ref':'Wilson, N. J., Flowers, G. E., & Mingo, L. (2013). Journal of Geophysical Research: Earth Surface, 118(3), pp.1443-1459. DOI:10.1002/jgrf.20096',
'dflag':np.nan,
'remarks_t':'Lat/lon from Google Earth. Survey methods: GPRt and DRIh',
'remarks_ttt':''
}
search_dist_south = 10  # search radius in meters (haversine distance, since coordinates are in degrees)
In [60]:
south_path = 'C:\\Users\\jlandman\\Desktop\\newData\\WG_Farinotti_data_package\\north_and_south_glacier\\depth_GL1_080911.xyz'
# read original data
south_dat = pd.read_csv(south_path, header=None, sep='\t') # 0:easting (m), 1:northing (m), 2:depth (m), 3:estimated picking error (m)
# convert UTM 7N to WGS84 lat/lon
xs = south_dat[0].values
ys = south_dat[1].values
lons, lats = pyproj.transform(utm7n, latlon, xs, ys)
# fill T table
south_t = templ_t.copy()
south_t.loc[0,'GlaThiDa_ID'] = south['gid']
south_t.loc[0,'POLITICAL_UNIT'] = south['punit']
south_t.loc[0,'GLACIER_NAME'] = south['gname']
south_t.loc[0,'SOURCE_ID'] = south['src_id']
south_t.loc[0,'GLACIER_ID'] = south['src_g_id']
south_t.loc[0,'LAT'] = south['lat']
south_t.loc[0,'LON'] = south['lon']
south_t.loc[0,'SURVEY_DATE'] = south['survey_date']
south_t.loc[0,'DEM_DATE'] = south['dem_date']
south_t.loc[0,'AREA'] = south['area']
south_t.loc[0,'MEAN_SLOPE'] = south['mean_sl']
south_t.loc[0,'MEAN_THICKNESS'] = south['mean_th']
south_t.loc[0,'MEAN_THICKNESS_UNCERTAINTY'] = south['mean_th_unc']
south_t.loc[0,'MAXIMUM_THICKNESS'] = south['max_th']
south_t.loc[0,'MAX_THICKNESS_UNCERTAINTY'] = south['max_th_unc']
south_t.loc[0,'SURVEY_METHOD'] = south['surv_meth']
south_t.loc[0,'SURVEY_METHOD_DETAILS'] = south['surv_meth_det']
south_t.loc[0,'NUMBER_OF_SURVEY_POINTS'] = south['no_points']
south_t.loc[0,'NUMBER_OF_SURVEY_PROFILES'] = south['no_prfls']
south_t.loc[0,'TOTAL_LENGTH_OF_SURVEY_PROFILES'] = south['length_prfls']
south_t.loc[0,'INTERPOLATION_METHOD'] = south['interp_meth']
south_t.loc[0,'INVESTIGATOR'] = south['investig']
south_t.loc[0,'SPONSORING_AGENCY'] = south['spons_ag']
south_t.loc[0,'REFERENCES'] = south['ref']
south_t.loc[0,'DATA_FLAG'] = south['dflag']
south_t.loc[0,'REMARKS'] = south['remarks_t']
# fill TTT table and thin data
south_ttt = templ_ttt.copy()
south_ttt['POINT_LAT'] = lats
south_ttt['POINT_LON'] = lons
south_ttt['ELEVATION'] = np.nan
south_ttt['THICKNESS'] = (south_dat[2]).astype(np.float64)
south_ttt['THICKNESS_UNCERTAINTY'] = (south_dat[3]).astype(np.float64)
# old point thinning
# south_ttt = south_ttt.groupby(south_ttt.index // thin_value_south).mean() # average over every x values as specified above
# thin the data and filter "mean of empty slice" warnings (in case there is no elevation)
with warnings.catch_warnings():
    warnings.simplefilter("ignore", category=RuntimeWarning)
    south_ttt = p_thin(south_ttt, xcol='POINT_LON', ycol='POINT_LAT',
                       datacols=['ELEVATION', 'THICKNESS', 'THICKNESS_UNCERTAINTY'],
                       radius=search_dist_south, method='nanmean', units='deg')
south_ttt['POINT_ID'] = range(1,len(south_ttt)+1)
south_ttt['GlaThiDa_ID'] = south['gid']
south_ttt['POLITICAL_UNIT'] = south['punit']
south_ttt['GLACIER_NAME'] = south['gname']
south_ttt['SURVEY_DATE'] = south['survey_date']
south_ttt['DATA_FLAG'] = south['dflag']
south_ttt['REMARKS'] = south['remarks_ttt'] + ' Point data have been thinned (mean) within a search distance of %s m.' % search_dist_south
# round again after thinning (the averaged values are no longer rounded)
south_ttt['THICKNESS'] = south_ttt['THICKNESS'].round()
south_ttt['THICKNESS_UNCERTAINTY'] = south_ttt['THICKNESS_UNCERTAINTY'].round(2)
south_t = south_t[['GlaThiDa_ID', 'POLITICAL_UNIT', 'GLACIER_NAME', 'SOURCE_ID', 'GLACIER_ID', 'LAT', 'LON', 'SURVEY_DATE',
'DEM_DATE', 'AREA', 'MEAN_SLOPE', 'MEAN_THICKNESS', 'MEAN_THICKNESS_UNCERTAINTY', 'MAXIMUM_THICKNESS',
'MAX_THICKNESS_UNCERTAINTY', 'SURVEY_METHOD', 'SURVEY_METHOD_DETAILS', 'NUMBER_OF_SURVEY_POINTS',
'NUMBER_OF_SURVEY_PROFILES', 'TOTAL_LENGTH_OF_SURVEY_PROFILES', 'INTERPOLATION_METHOD', 'INVESTIGATOR',
'SPONSORING_AGENCY', 'REFERENCES', 'DATA_FLAG', 'REMARKS']]
south_ttt = south_ttt[['GlaThiDa_ID','POLITICAL_UNIT','GLACIER_NAME','SURVEY_DATE','POINT_ID','POINT_LAT','POINT_LON',
'ELEVATION','THICKNESS','THICKNESS_UNCERTAINTY','DATA_FLAG','REMARKS']]
south_t.to_excel(os.path.splitext(south_path)[0]+'_Johannes_T.xls', index=False)
south_ttt.to_excel(os.path.splitext(south_path)[0]+'_Johannes_TTT.xls', index=False)
In [5]:
aqps = pyproj.Proj(init='epsg:3031') # Antarctica polar stereographic
latlon = pyproj.Proj(init='epsg:4326') #WGS84 lat/lon
In [6]:
# constants
starb = {'punit': 'AQ',
'gname': 'STARBUCK',
'src_id':'',
'src_g_id':'',
'survey_date': '20121299',
'dem_date': '20079999',
'gid': gtd_ids['starbuck_gprt'],
'lat': -65.614439,
'lon': -62.418465,
'area': 258,
'mean_sl': np.nan,
'mean_th': 312,
'mean_th_unc': 30,
'max_th': 1020,
'max_th_unc': np.nan,
'surv_meth': 'GPRt',
'surv_meth_det': 'Deep-Look Radio-Echo Sounder (DELORES), 1-4 MHz range (3MHz central frequ.)',
'no_points': 1315,
'no_prfls': 41,
'length_prfls': 90,
'interp_meth':'',
'investig':'Daniel FARINOTTI and colleagues',
'spons_ag':'',
'ref':'Farinotti, D. et al. (2014). Annals of Glaciology, 55(67), pp. 22-28. DOI:10.3189/2014AoG67A025',
'dflag':np.nan,
'remarks_t':'Lat/lon from Google Earth. DEM from Cook et al. (2012) (doi: 10.5194/essd-4-129-2012)',
'remarks_ttt':''
}
search_dist_starbuck = 10
In [31]:
starb_path = 'C:\\Users\\jlandman\\Desktop\\newData\\WG_Farinotti_data_package\\starbuck\\starbuck_RES_data_including_MCoRDS.txt'
# read original data
starb_dat = pd.read_csv(starb_path, comment='#', delim_whitespace=True)
# select the GPRt measurements (exclude the IceBridge points)
starb_dat = starb_dat[starb_dat.orgnm != 'icebridge']
# convert Antarctic Polar Stereographic to WGS84 lat/lon
xs = starb_dat['lon'].values
ys = starb_dat['lat'].values
lons, lats = pyproj.transform(aqps, latlon, xs, ys)
# fill T table
starb_t = templ_t.copy()
starb_t.loc[0,'GlaThiDa_ID'] = starb['gid']
starb_t.loc[0,'POLITICAL_UNIT'] = starb['punit']
starb_t.loc[0,'GLACIER_NAME'] = starb['gname']
starb_t.loc[0,'SOURCE_ID'] = starb['src_id']
starb_t.loc[0,'GLACIER_ID'] = starb['src_g_id']
starb_t.loc[0,'LAT'] = starb['lat']
starb_t.loc[0,'LON'] = starb['lon']
starb_t.loc[0,'SURVEY_DATE'] = starb['survey_date']
starb_t.loc[0,'DEM_DATE'] = starb['dem_date']
starb_t.loc[0,'AREA'] = starb['area']
starb_t.loc[0,'MEAN_SLOPE'] = starb['mean_sl']
starb_t.loc[0,'MEAN_THICKNESS'] = starb['mean_th']
starb_t.loc[0,'MEAN_THICKNESS_UNCERTAINTY'] = starb['mean_th_unc']
starb_t.loc[0,'MAXIMUM_THICKNESS'] = starb['max_th']
starb_t.loc[0,'MAX_THICKNESS_UNCERTAINTY'] = starb['max_th_unc']
starb_t.loc[0,'SURVEY_METHOD'] = starb['surv_meth']
starb_t.loc[0,'SURVEY_METHOD_DETAILS'] = starb['surv_meth_det']
starb_t.loc[0,'NUMBER_OF_SURVEY_POINTS'] = starb['no_points']
starb_t.loc[0,'NUMBER_OF_SURVEY_PROFILES'] = starb['no_prfls']
starb_t.loc[0,'TOTAL_LENGTH_OF_SURVEY_PROFILES'] = starb['length_prfls']
starb_t.loc[0,'INTERPOLATION_METHOD'] = starb['interp_meth']
starb_t.loc[0,'INVESTIGATOR'] = starb['investig']
starb_t.loc[0,'SPONSORING_AGENCY'] = starb['spons_ag']
starb_t.loc[0,'REFERENCES'] = starb['ref']
starb_t.loc[0,'DATA_FLAG'] = starb['dflag']
starb_t.loc[0,'REMARKS'] = starb['remarks_t']
# fill TTT table
starb_ttt = templ_ttt.copy()
starb_ttt['POINT_LAT'] = lats
starb_ttt['POINT_LON'] = lons
starb_ttt['ELEVATION'] = starb_dat['z_surf']
starb_ttt['THICKNESS'] = starb_dat['ice_th']
starb_ttt['THICKNESS_UNCERTAINTY'] = np.nan
# thin the data and filter "mean of empty slice" warnings (in case there is no elevation)
with warnings.catch_warnings():
    warnings.simplefilter("ignore", category=RuntimeWarning)
    starb_ttt = p_thin(starb_ttt, xcol='POINT_LON', ycol='POINT_LAT',
                       datacols=['ELEVATION', 'THICKNESS', 'THICKNESS_UNCERTAINTY'],
                       radius=search_dist_starbuck, method='nanmean', units='deg')
starb_ttt['THICKNESS'] = starb_ttt['THICKNESS'].round()
starb_ttt['THICKNESS_UNCERTAINTY'] = starb_ttt['THICKNESS_UNCERTAINTY'].round()
starb_ttt['ELEVATION'] = starb_ttt['ELEVATION'].round()
starb_ttt['POINT_ID'] = range(1,len(starb_ttt)+1)
starb_ttt['GlaThiDa_ID'] = starb['gid']
starb_ttt['POLITICAL_UNIT'] = starb['punit']
starb_ttt['GLACIER_NAME'] = starb['gname']
starb_ttt['SURVEY_DATE'] = starb['survey_date']
starb_ttt['DATA_FLAG'] = starb['dflag']
starb_ttt['REMARKS'] = starb['remarks_ttt'] + ' Point data have been thinned (mean) within a search distance of %s m.' % search_dist_starbuck
starb_t = starb_t[['GlaThiDa_ID', 'POLITICAL_UNIT', 'GLACIER_NAME', 'SOURCE_ID', 'GLACIER_ID', 'LAT', 'LON', 'SURVEY_DATE',
'DEM_DATE', 'AREA', 'MEAN_SLOPE', 'MEAN_THICKNESS', 'MEAN_THICKNESS_UNCERTAINTY', 'MAXIMUM_THICKNESS',
'MAX_THICKNESS_UNCERTAINTY', 'SURVEY_METHOD', 'SURVEY_METHOD_DETAILS', 'NUMBER_OF_SURVEY_POINTS',
'NUMBER_OF_SURVEY_PROFILES', 'TOTAL_LENGTH_OF_SURVEY_PROFILES', 'INTERPOLATION_METHOD', 'INVESTIGATOR',
'SPONSORING_AGENCY', 'REFERENCES', 'DATA_FLAG', 'REMARKS']]
starb_ttt = starb_ttt[['GlaThiDa_ID','POLITICAL_UNIT','GLACIER_NAME','SURVEY_DATE','POINT_ID','POINT_LAT','POINT_LON',
'ELEVATION','THICKNESS','THICKNESS_UNCERTAINTY','DATA_FLAG','REMARKS']]
starb_t.to_excel(os.path.splitext(starb_path)[0]+'_GPRt_Johannes_T.xls', index=False)
starb_ttt.to_excel(os.path.splitext(starb_path)[0]+'_GPRt_Johannes_TTT.xls', index=False)
In [16]:
# constants
starb_i = {'punit': 'AQ',
'gname': 'STARBUCK',
'src_id':'',
'src_g_id':'',
'survey_date': '20111014',
'dem_date': '20079999',
'gid': gtd_ids['starbuck_icebr'],
'lat': -65.614439,
'lon': -62.418465,
'area': 258,
'mean_sl': np.nan,
'mean_th': np.nan,
'mean_th_unc': np.nan,
'max_th': np.nan,
'max_th_unc': np.nan,
'surv_meth': 'OTH',
'surv_meth_det': 'NASA IceBridge project (MCoRDS sensor)',
'no_points': 103,
'no_prfls': np.nan,
'length_prfls': np.nan,
'interp_meth':'',
'investig':'',
'spons_ag':'',
'ref':'Farinotti, D. et al. (2014). Annals of Glaciology, 55(67), pp. 22-28. DOI:10.3189/2014AoG67A025',
'dflag':np.nan,
'remarks_t':'Survey Method: NASA IceBridge project (MCoRDS sensor). Data used as base for cross-validation in given reference. DEM from Cook et al. (2012) (doi: 10.5194/essd-4-129-2012)',
'remarks_ttt':''
}
In [29]:
starb_path = 'C:\\Users\\jlandman\\Desktop\\newData\\WG_Farinotti_data_package\\starbuck\\starbuck_RES_data_including_MCoRDS.txt'
# read original data
starb_i_dat = pd.read_csv(starb_path, comment='#', delim_whitespace=True)
# select the IceBridge (MCoRDS) measurements
starb_i_dat = starb_i_dat[starb_i_dat.orgnm == 'icebridge']
# convert Antarctic Polar Stereographic to WGS84 lat/lon
xs = starb_i_dat['lon'].values
ys = starb_i_dat['lat'].values
lons, lats = pyproj.transform(aqps, latlon, xs, ys)
# fill T table
starb_i_t = templ_t.copy()
starb_i_t.loc[0,'GlaThiDa_ID'] = starb_i['gid']
starb_i_t.loc[0,'POLITICAL_UNIT'] = starb_i['punit']
starb_i_t.loc[0,'GLACIER_NAME'] = starb_i['gname']
starb_i_t.loc[0,'SOURCE_ID'] = starb_i['src_id']
starb_i_t.loc[0,'GLACIER_ID'] = starb_i['src_g_id']
starb_i_t.loc[0,'LAT'] = starb_i['lat']
starb_i_t.loc[0,'LON'] = starb_i['lon']
starb_i_t.loc[0,'SURVEY_DATE'] = starb_i['survey_date']
starb_i_t.loc[0,'DEM_DATE'] = starb_i['dem_date']
starb_i_t.loc[0,'AREA'] = starb_i['area']
starb_i_t.loc[0,'MEAN_SLOPE'] = starb_i['mean_sl']
starb_i_t.loc[0,'MEAN_THICKNESS'] = starb_i['mean_th']
starb_i_t.loc[0,'MEAN_THICKNESS_UNCERTAINTY'] = starb_i['mean_th_unc']
starb_i_t.loc[0,'MAXIMUM_THICKNESS'] = starb_i['max_th']
starb_i_t.loc[0,'MAX_THICKNESS_UNCERTAINTY'] = starb_i['max_th_unc']
starb_i_t.loc[0,'SURVEY_METHOD'] = starb_i['surv_meth']
starb_i_t.loc[0,'SURVEY_METHOD_DETAILS'] = starb_i['surv_meth_det']
starb_i_t.loc[0,'NUMBER_OF_SURVEY_POINTS'] = starb_i['no_points']
starb_i_t.loc[0,'NUMBER_OF_SURVEY_PROFILES'] = starb_i['no_prfls']
starb_i_t.loc[0,'TOTAL_LENGTH_OF_SURVEY_PROFILES'] = starb_i['length_prfls']
starb_i_t.loc[0,'INTERPOLATION_METHOD'] = starb_i['interp_meth']
starb_i_t.loc[0,'INVESTIGATOR'] = starb_i['investig']
starb_i_t.loc[0,'SPONSORING_AGENCY'] = starb_i['spons_ag']
starb_i_t.loc[0,'REFERENCES'] = starb_i['ref']
starb_i_t.loc[0,'DATA_FLAG'] = starb_i['dflag']
starb_i_t.loc[0,'REMARKS'] = starb_i['remarks_t']
# fill TTT table
starb_i_ttt = templ_ttt.copy()
starb_i_ttt['POINT_ID'] = range(1,len(starb_i_dat)+1)
starb_i_ttt['POINT_LAT'] = lats
starb_i_ttt['POINT_LON'] = lons
starb_i_ttt['ELEVATION'] = starb_i_dat['z_surf'].values.round()
starb_i_ttt['THICKNESS'] = starb_i_dat['ice_th'].values.round()
starb_i_ttt['THICKNESS_UNCERTAINTY'] = np.nan
starb_i_ttt['GlaThiDa_ID'] = starb_i['gid']
starb_i_ttt['POLITICAL_UNIT'] = starb_i['punit']
starb_i_ttt['GLACIER_NAME'] = starb_i['gname']
starb_i_ttt['SURVEY_DATE'] = starb_i['survey_date']
starb_i_ttt['DATA_FLAG'] = starb_i['dflag']
starb_i_ttt['REMARKS'] = starb_i['remarks_ttt']
starb_i_t = starb_i_t[['GlaThiDa_ID', 'POLITICAL_UNIT', 'GLACIER_NAME', 'SOURCE_ID', 'GLACIER_ID', 'LAT', 'LON', 'SURVEY_DATE',
'DEM_DATE', 'AREA', 'MEAN_SLOPE', 'MEAN_THICKNESS', 'MEAN_THICKNESS_UNCERTAINTY', 'MAXIMUM_THICKNESS',
'MAX_THICKNESS_UNCERTAINTY', 'SURVEY_METHOD', 'SURVEY_METHOD_DETAILS', 'NUMBER_OF_SURVEY_POINTS',
'NUMBER_OF_SURVEY_PROFILES', 'TOTAL_LENGTH_OF_SURVEY_PROFILES', 'INTERPOLATION_METHOD', 'INVESTIGATOR',
'SPONSORING_AGENCY', 'REFERENCES', 'DATA_FLAG', 'REMARKS']]
starb_i_ttt = starb_i_ttt[['GlaThiDa_ID','POLITICAL_UNIT','GLACIER_NAME','SURVEY_DATE','POINT_ID','POINT_LAT','POINT_LON',
'ELEVATION','THICKNESS','THICKNESS_UNCERTAINTY','DATA_FLAG','REMARKS']]
starb_i_t.to_excel(os.path.splitext(starb_path)[0]+'_IceBridge_Johannes_T.xls', index=False)
starb_i_ttt.to_excel(os.path.splitext(starb_path)[0]+'_IceBridge_Johannes_TTT.xls', index=False)
In [66]:
nzgd = pyproj.Proj(init='epsg:2193') # NZGD2000_New_Zealand_Transverse_Mercator_2000
latlon = pyproj.Proj(init='epsg:4326') #WGS84 lat/lon
In [67]:
# constants
tasman = {'punit': 'NZ',
'gname': 'TASMAN',
'src_id':'',
'src_g_id':'',
'survey_date': '',
'dem_date': '19869999',
'gid': gtd_ids['tasman'],
'lat': -43.52,
'lon': 170.32,
'area': 100.3,
'mean_sl': 19,
'mean_th': np.nan,
'mean_th_unc': np.nan,
'max_th': np.nan,
'max_th_unc': np.nan,
'surv_meth': 'SEI',
'surv_meth_det': 'Two generalised cross profiles of ice thickness are inferred from reflection seismics. x, y and ice thickness values are taken at points spaced along the published profiles (Figure 18, p23). A map of interpolated ice thickness is given (Figure 19, p26), but it is only based on measurements at the two widely-spaced profiles.',
'no_points': np.nan,
'no_prfls': 2,
'length_prfls': 3,
'interp_meth':'',
'investig':'',
'spons_ag':'',
'ref':'Anderton, P. W. (1975). Hydrological Research Annual Report 33. Ministry of Works and Development.',
'dflag':np.nan,
'remarks_t':'No mean/max thickness given as there are only highly varying estimates for two cross-sections of the glacier in the reference. Mean slope calculated from DEM by WGMS.',
'remarks_ttt':''
}
In [68]:
tasman_path = 'C:\\Users\\jlandman\\Desktop\\newData\\WG_Farinotti_data_package\\tasman\\tasman_anderton_bed_xyz.csv'
# read original data
tasman_dat = pd.read_csv(tasman_path)
# convert NZTM to WGS84 lat/lon
xs = tasman_dat['nztm_easting (m)'].values
ys = tasman_dat[' nztm_northing (m)'].values
lons, lats = pyproj.transform(nzgd, latlon, xs, ys)
# fill T table
tasman_t = templ_t.copy()
tasman_t.loc[0,'GlaThiDa_ID'] = tasman['gid']
tasman_t.loc[0,'POLITICAL_UNIT'] = tasman['punit']
tasman_t.loc[0,'GLACIER_NAME'] = tasman['gname']
tasman_t.loc[0,'SOURCE_ID'] = tasman['src_id']
tasman_t.loc[0,'GLACIER_ID'] = tasman['src_g_id']
tasman_t.loc[0,'LAT'] = tasman['lat']
tasman_t.loc[0,'LON'] = tasman['lon']
tasman_t.loc[0,'SURVEY_DATE'] = tasman['survey_date']
tasman_t.loc[0,'DEM_DATE'] = tasman['dem_date']
tasman_t.loc[0,'AREA'] = tasman['area']
tasman_t.loc[0,'MEAN_SLOPE'] = tasman['mean_sl']
tasman_t.loc[0,'MEAN_THICKNESS'] = tasman['mean_th']
tasman_t.loc[0,'MEAN_THICKNESS_UNCERTAINTY'] = tasman['mean_th_unc']
tasman_t.loc[0,'MAXIMUM_THICKNESS'] = tasman['max_th']
tasman_t.loc[0,'MAX_THICKNESS_UNCERTAINTY'] = tasman['max_th_unc']
tasman_t.loc[0,'SURVEY_METHOD'] = tasman['surv_meth']
tasman_t.loc[0,'SURVEY_METHOD_DETAILS'] = tasman['surv_meth_det']
tasman_t.loc[0,'NUMBER_OF_SURVEY_POINTS'] = tasman['no_points']
tasman_t.loc[0,'NUMBER_OF_SURVEY_PROFILES'] = tasman['no_prfls']
tasman_t.loc[0,'TOTAL_LENGTH_OF_SURVEY_PROFILES'] = tasman['length_prfls']
tasman_t.loc[0,'INTERPOLATION_METHOD'] = tasman['interp_meth']
tasman_t.loc[0,'INVESTIGATOR'] = tasman['investig']
tasman_t.loc[0,'SPONSORING_AGENCY'] = tasman['spons_ag']
tasman_t.loc[0,'REFERENCES'] = tasman['ref']
tasman_t.loc[0,'DATA_FLAG'] = tasman['dflag']
tasman_t.loc[0,'REMARKS'] = tasman['remarks_t']
# fill TTT table
tasman_ttt = templ_ttt.copy()
tasman_ttt['POINT_ID'] = range(1,len(tasman_dat)+1)
tasman_ttt['POINT_LAT'] = lats
tasman_ttt['POINT_LON'] = lons
tasman_ttt['ELEVATION'] = tasman_dat[' surface_elevation (m asl)'].round()
tasman_ttt['THICKNESS'] = tasman_dat[' ice_thickness (m)'].round()
tasman_ttt['THICKNESS_UNCERTAINTY'] = np.nan
tasman_ttt['GlaThiDa_ID'] = tasman['gid']
tasman_ttt['POLITICAL_UNIT'] = tasman['punit']
tasman_ttt['GLACIER_NAME'] = tasman['gname']
tasman_ttt['SURVEY_DATE'] = tasman['survey_date']
tasman_ttt['DATA_FLAG'] = tasman['dflag']
tasman_ttt['REMARKS'] = tasman['remarks_ttt']
tasman_t = tasman_t[['GlaThiDa_ID', 'POLITICAL_UNIT', 'GLACIER_NAME', 'SOURCE_ID', 'GLACIER_ID', 'LAT', 'LON', 'SURVEY_DATE',
'DEM_DATE', 'AREA', 'MEAN_SLOPE', 'MEAN_THICKNESS', 'MEAN_THICKNESS_UNCERTAINTY', 'MAXIMUM_THICKNESS',
'MAX_THICKNESS_UNCERTAINTY', 'SURVEY_METHOD', 'SURVEY_METHOD_DETAILS', 'NUMBER_OF_SURVEY_POINTS',
'NUMBER_OF_SURVEY_PROFILES', 'TOTAL_LENGTH_OF_SURVEY_PROFILES', 'INTERPOLATION_METHOD', 'INVESTIGATOR',
'SPONSORING_AGENCY', 'REFERENCES', 'DATA_FLAG', 'REMARKS']]
tasman_ttt = tasman_ttt[['GlaThiDa_ID','POLITICAL_UNIT','GLACIER_NAME','SURVEY_DATE','POINT_ID','POINT_LAT','POINT_LON',
'ELEVATION','THICKNESS','THICKNESS_UNCERTAINTY','DATA_FLAG','REMARKS']]
tasman_t.to_excel(os.path.splitext(tasman_path)[0]+'_Johannes_T.xls', index=False)
tasman_ttt.to_excel(os.path.splitext(tasman_path)[0]+'_Johannes_TTT.xls', index=False)
In [69]:
# constants
urum = {'punit': 'CN',
'gname': 'URUMQI NO.1',
'src_id':'OTH',
'src_g_id':'',
'survey_date': '20149999',
'dem_date': '20129999',
'gid': gtd_ids['urumqui'],
'lat': 43.111240,
'lon': 86.810015,
'area': 1.59,
'mean_sl': 20,
'mean_th': 44.5,
'mean_th_unc': np.nan,
'max_th': 141,
'max_th_unc': np.nan,
'surv_meth': 'GPRt',
'surv_meth_det': '',
'no_points': 1387,
'no_prfls': 16,
'length_prfls': np.nan,
'interp_meth':'KRG',
'investig':'',
'spons_ag':'',
'ref':'Wang, P. et al. (2016). Environmental Earth Sciences, 75(8), pp. 1-11. DOI: 10.1007/s12665-016-5551-3',
'dflag':np.nan,
'remarks_t':'Lat/lon from Google Earth and located on upper tongue of eastern branch. Mean slope calculated from DEM by WGMS.',
'remarks_ttt':''
}
In [70]:
urum_path_w = 'C:\\Users\\jlandman\\Desktop\\newData\\WG_Farinotti_data_package\\urumqui\\west_branch\\original_west.shp'
urum_path_e = 'C:\\Users\\jlandman\\Desktop\\newData\\WG_Farinotti_data_package\\urumqui\\east_branch\\original_east.shp'
urum_out = 'C:\\Users\\jlandman\\Desktop\\newData\\WG_Farinotti_data_package\\urumqui\\'
# read original data
urumw_dat = gpd.read_file(urum_path_w)
urume_dat = gpd.read_file(urum_path_e)
urumall_dat = pd.concat([urumw_dat, urume_dat], ignore_index=True)
urumall_dat.index = range(1, len(urumall_dat)+1)
lats = urumall_dat.N
lons = urumall_dat.E
elev = urumall_dat.a_s_l.round()
ice_th = urumall_dat['冰川厚度('].round()  # Chinese column name: 'ice thickness ('
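The '冰川厚度(' field name comes straight from the shapefile attribute table: it is Chinese for 'ice thickness (', apparently truncated by the DBF field-name length limit. If the exact spelling is ever in doubt, listing the columns confirms it before indexing:

# inspect the attribute columns of the merged GeoDataFrame
print(urumall_dat.columns.tolist())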
# fill T table
urum_t = templ_t.copy()
urum_t.loc[0,'GlaThiDa_ID'] = urum['gid']
urum_t.loc[0,'POLITICAL_UNIT'] = urum['punit']
urum_t.loc[0,'GLACIER_NAME'] = urum['gname']
urum_t.loc[0,'SOURCE_ID'] = urum['src_id']
urum_t.loc[0,'GLACIER_ID'] = urum['src_g_id']
urum_t.loc[0,'LAT'] = urum['lat']
urum_t.loc[0,'LON'] = urum['lon']
urum_t.loc[0,'SURVEY_DATE'] = urum['survey_date']
urum_t.loc[0,'DEM_DATE'] = urum['dem_date']
urum_t.loc[0,'AREA'] = urum['area']
urum_t.loc[0,'MEAN_SLOPE'] = urum['mean_sl']
urum_t.loc[0,'MEAN_THICKNESS'] = urum['mean_th']
urum_t.loc[0,'MEAN_THICKNESS_UNCERTAINTY'] = urum['mean_th_unc']
urum_t.loc[0,'MAXIMUM_THICKNESS'] = urum['max_th']
urum_t.loc[0,'MAX_THICKNESS_UNCERTAINTY'] = urum['max_th_unc']
urum_t.loc[0,'SURVEY_METHOD'] = urum['surv_meth']
urum_t.loc[0,'SURVEY_METHOD_DETAILS'] = urum['surv_meth_det']
urum_t.loc[0,'NUMBER_OF_SURVEY_POINTS'] = urum['no_points']
urum_t.loc[0,'NUMBER_OF_SURVEY_PROFILES'] = urum['no_prfls']
urum_t.loc[0,'TOTAL_LENGTH_OF_SURVEY_PROFILES'] = urum['length_prfls']
urum_t.loc[0,'INTERPOLATION_METHOD'] = urum['interp_meth']
urum_t.loc[0,'INVESTIGATOR'] = urum['investig']
urum_t.loc[0,'SPONSORING_AGENCY'] = urum['spons_ag']
urum_t.loc[0,'REFERENCES'] = urum['ref']
urum_t.loc[0,'DATA_FLAG'] = urum['dflag']
urum_t.loc[0,'REMARKS'] = urum['remarks_t']
# fill TTT table
urum_ttt = templ_ttt.copy()
urum_ttt['POINT_ID'] = range(1,len(urumw_dat)+len(urume_dat)+1)
urum_ttt['POINT_LAT'] = lats
urum_ttt['POINT_LON'] = lons
urum_ttt['ELEVATION'] = elev
urum_ttt['THICKNESS'] = ice_th
urum_ttt['THICKNESS_UNCERTAINTY'] = np.nan
urum_ttt['GlaThiDa_ID'] = urum['gid']
urum_ttt['POLITICAL_UNIT'] = urum['punit']
urum_ttt['GLACIER_NAME'] = urum['gname']
urum_ttt['SURVEY_DATE'] = urum['survey_date']
urum_ttt['DATA_FLAG'] = urum['dflag']
urum_ttt['REMARKS'] = urum['remarks_ttt']
urum_t = urum_t[['GlaThiDa_ID', 'POLITICAL_UNIT', 'GLACIER_NAME', 'SOURCE_ID', 'GLACIER_ID', 'LAT', 'LON', 'SURVEY_DATE',
'DEM_DATE', 'AREA', 'MEAN_SLOPE', 'MEAN_THICKNESS', 'MEAN_THICKNESS_UNCERTAINTY', 'MAXIMUM_THICKNESS',
'MAX_THICKNESS_UNCERTAINTY', 'SURVEY_METHOD', 'SURVEY_METHOD_DETAILS', 'NUMBER_OF_SURVEY_POINTS',
'NUMBER_OF_SURVEY_PROFILES', 'TOTAL_LENGTH_OF_SURVEY_PROFILES', 'INTERPOLATION_METHOD', 'INVESTIGATOR',
'SPONSORING_AGENCY', 'REFERENCES', 'DATA_FLAG', 'REMARKS']]
urum_ttt = urum_ttt[['GlaThiDa_ID','POLITICAL_UNIT','GLACIER_NAME','SURVEY_DATE','POINT_ID','POINT_LAT','POINT_LON',
'ELEVATION','THICKNESS','THICKNESS_UNCERTAINTY','DATA_FLAG','REMARKS']]
urum_t.to_excel(os.path.join(urum_out,'Urumqi_Johannes_T.xls'), index=False)
urum_ttt.to_excel(os.path.join(urum_out,'Urumqi_Johannes_TTT.xls'), index=False)
In [71]:
# constants
wwmw = {'punit': 'CA',
'gname': 'WEST WASHMAWAPTA',
'src_id':'',
'src_g_id':'',
'survey_date': '20069999',
'dem_date': '20079999',
'gid': gtd_ids['west_washmawapta'],
'lat': 51.1833,
'lon': -116.3189,
'area': 1,
'mean_sl': 16,
'mean_th': 70,
'mean_th_unc': 7,
'max_th': 185,
'max_th_unc': 13,
'surv_meth': 'GPRt',
'surv_meth_det': 'Icefield Instruments ice-penetrating radar with 5 MHz center frequency and 10 m antennae spaced 50 m apart. A signal propagation speed of 1.68*10^8 m/s was assumed, and individual profiles were migrated using the circle-tangent technique.',
'no_points': 199,
'no_prfls': 21,
'length_prfls': np.nan,
'interp_meth':'',
'investig':'',
'spons_ag':'',
'ref':'Sanders, J. W. et al. (2010). American Journal of Science, 310(8), pp.753-773. DOI: 10.2475/08.2010.03',
'dflag':np.nan,
'remarks_t':'Near the steeply-sloping marginal walls, where the ice is thin and the topography complex, errors increased to 13 ± 2 m (27 ± 8%). Mean slope calculated from DEM by WGMS.',
'remarks_ttt':''
}
In [72]:
utm11n = pyproj.Proj(init='epsg:32611') # UTM 11N (WGS84)
latlon = pyproj.Proj(init='epsg:4326') #WGS84 lat/lon
In [73]:
wwmw_path = 'C:\\Users\\jlandman\\Desktop\\newData\\WG_Farinotti_data_package\\west_washmawapta\\radar_table.txt'
# read original data
wwmw_dat = pd.read_csv(wwmw_path)
ice_th = (wwmw_dat['SurfaceElev'] - wwmw_dat['BedrockElev']).round()
# convert UTM 11N to WGS84 lat/lon
xs = wwmw_dat['Easting'].values
ys = wwmw_dat['Northing'].values
lons, lats = pyproj.transform(utm11n, latlon, xs, ys)
# fill T table
wwmw_t = templ_t.copy()
wwmw_t.loc[0,'GlaThiDa_ID'] = wwmw['gid']
wwmw_t.loc[0,'POLITICAL_UNIT'] = wwmw['punit']
wwmw_t.loc[0,'GLACIER_NAME'] = wwmw['gname']
wwmw_t.loc[0,'SOURCE_ID'] = wwmw['src_id']
wwmw_t.loc[0,'GLACIER_ID'] = wwmw['src_g_id']
wwmw_t.loc[0,'LAT'] = wwmw['lat']
wwmw_t.loc[0,'LON'] = wwmw['lon']
wwmw_t.loc[0,'SURVEY_DATE'] = wwmw['survey_date']
wwmw_t.loc[0,'DEM_DATE'] = wwmw['dem_date']
wwmw_t.loc[0,'AREA'] = wwmw['area']
wwmw_t.loc[0,'MEAN_SLOPE'] = wwmw['mean_sl']
wwmw_t.loc[0,'MEAN_THICKNESS'] = wwmw['mean_th']
wwmw_t.loc[0,'MEAN_THICKNESS_UNCERTAINTY'] = wwmw['mean_th_unc']
wwmw_t.loc[0,'MAXIMUM_THICKNESS'] = wwmw['max_th']
wwmw_t.loc[0,'MAX_THICKNESS_UNCERTAINTY'] = wwmw['max_th_unc']
wwmw_t.loc[0,'SURVEY_METHOD'] = wwmw['surv_meth']
wwmw_t.loc[0,'SURVEY_METHOD_DETAILS'] = wwmw['surv_meth_det']
wwmw_t.loc[0,'NUMBER_OF_SURVEY_POINTS'] = wwmw['no_points']
wwmw_t.loc[0,'NUMBER_OF_SURVEY_PROFILES'] = wwmw['no_prfls']
wwmw_t.loc[0,'TOTAL_LENGTH_OF_SURVEY_PROFILES'] = wwmw['length_prfls']
wwmw_t.loc[0,'INTERPOLATION_METHOD'] = wwmw['interp_meth']
wwmw_t.loc[0,'INVESTIGATOR'] = wwmw['investig']
wwmw_t.loc[0,'SPONSORING_AGENCY'] = wwmw['spons_ag']
wwmw_t.loc[0,'REFERENCES'] = wwmw['ref']
wwmw_t.loc[0,'DATA_FLAG'] = wwmw['dflag']
wwmw_t.loc[0,'REMARKS'] = wwmw['remarks_t']
# fill TTT table
wwmw_ttt = templ_ttt.copy()
wwmw_ttt['POINT_ID'] = range(1,len(wwmw_dat)+1)
wwmw_ttt['POINT_LAT'] = lats
wwmw_ttt['POINT_LON'] = lons
wwmw_ttt['ELEVATION'] = wwmw_dat['SurfaceElev'].round()
wwmw_ttt['THICKNESS'] = ice_th
wwmw_ttt['THICKNESS_UNCERTAINTY'] = np.nan
wwmw_ttt['GlaThiDa_ID'] = wwmw['gid']
wwmw_ttt['POLITICAL_UNIT'] = wwmw['punit']
wwmw_ttt['GLACIER_NAME'] = wwmw['gname']
wwmw_ttt['SURVEY_DATE'] = wwmw['survey_date']
wwmw_ttt['DATA_FLAG'] = wwmw['dflag']
wwmw_ttt['REMARKS'] = wwmw['remarks_ttt']
wwmw_t = wwmw_t[['GlaThiDa_ID', 'POLITICAL_UNIT', 'GLACIER_NAME', 'SOURCE_ID', 'GLACIER_ID', 'LAT', 'LON', 'SURVEY_DATE',
'DEM_DATE', 'AREA', 'MEAN_SLOPE', 'MEAN_THICKNESS', 'MEAN_THICKNESS_UNCERTAINTY', 'MAXIMUM_THICKNESS',
'MAX_THICKNESS_UNCERTAINTY', 'SURVEY_METHOD', 'SURVEY_METHOD_DETAILS', 'NUMBER_OF_SURVEY_POINTS',
'NUMBER_OF_SURVEY_PROFILES', 'TOTAL_LENGTH_OF_SURVEY_PROFILES', 'INTERPOLATION_METHOD', 'INVESTIGATOR',
'SPONSORING_AGENCY', 'REFERENCES', 'DATA_FLAG', 'REMARKS']]
wwmw_ttt = wwmw_ttt[['GlaThiDa_ID','POLITICAL_UNIT','GLACIER_NAME','SURVEY_DATE','POINT_ID','POINT_LAT','POINT_LON',
'ELEVATION','THICKNESS','THICKNESS_UNCERTAINTY','DATA_FLAG','REMARKS']]
wwmw_t.to_excel(os.path.join(os.path.dirname(wwmw_path),'West_Washmawapta_Johannes_T.xls'), index=False)
wwmw_ttt.to_excel(os.path.join(os.path.dirname(wwmw_path),'West_Washmawapta_Johannes_TTT.xls'), index=False)