In [1]:
import numpy as np
import pandas as pd
import json
import halfspace.projections as hsp
from halfspace.scripts import *
In [2]:
# Load the per-vertex fault point tables for the two fault systems.
fault_data_dir = '../data/fault_data/'
tib_dog_pts = pd.read_csv(fault_data_dir + 'tib_dog_pts_df.csv')
gurla_pts = pd.read_csv(fault_data_dir + 'gurla_pts_df.csv')
In [3]:
# Stack the two point tables into a single frame and give the columns
# uniform names (tri = triangle id, vertex = vertex id within triangle).
point_frames = [tib_dog_pts, gurla_pts]
tdg_pts = pd.concat(point_frames, ignore_index=True)
tdg_pts.columns = ['tri', 'vertex', 'east', 'north', 'depth', 'fault_name']
In [4]:
tdg_pts.tail()
Out[4]:
In [5]:
# Read the triangular-mesh fault geometries (GeoJSON FeatureCollections).
with open('../data/fault_data/tib_dog_tris.geojson', 'r') as geo_file:
    tdt = json.load(geo_file)
with open('../data/fault_data/gurla_tris.geojson', 'r') as geo_file:
    gt = json.load(geo_file)
In [6]:
# Paths to the precomputed topographic stress arrays and their metadata
# (produced by the separate nepal_2015 project).
nepal_dir = '../../nepal_2015/'
stress_file = nepal_dir + 'stress_arrays/nepal_topo_stress.h5'
stress_meta_file = nepal_dir + 'stress_arrays/stress_calcs_meta.json'
dem_meta_file = nepal_dir + 'data/dem/s_tibet_dem_meta.json'
In [7]:
# Sample the topographic stress tensor at every fault point.
# calc_topo_stresses_at_pts comes from halfspace.scripts (star import);
# resolve_stress=False keeps raw tensor components — we resolve onto
# the fault planes later, after averaging per triangle.
tdg = calc_topo_stresses_at_pts(tdg_pts,
                                stress_file=stress_file,
                                stress_meta_file=stress_meta_file,
                                dem_meta_file=dem_meta_file,
                                resolve_stress=False)
In [8]:
tdg.head()
Out[8]:
In [9]:
# Average the six stress-tensor components over each triangle's vertices,
# keyed by (fault_name, tri).
# FIX: groupby keys must be a *list*; passing a tuple of keys is
# deprecated and raises KeyError in pandas >= 2.0.
stress_cols = ['zz_stress', 'xy_stress',
               'xz_stress', 'yz_stress',
               'xx_stress', 'yy_stress']
tdg_avg = tdg.groupby(['fault_name', 'tri']).mean()[stress_cols]
In [10]:
tdg_avg.head()
Out[10]:
In [11]:
# Flatten the (fault_name, tri) MultiIndex out into ordinary columns,
# and add a plain integer counter to serve as the new index.
tdg_avg['i'] = np.arange(len(tdg_avg))
tdg_avg['fault_name'] = tdg_avg.index.get_level_values(0)
tdg_avg['tri'] = tdg_avg.index.get_level_values(1)
In [12]:
tdg_avg.set_index('i', inplace=True)
In [13]:
tdg_avg.head(), tdg_avg.tail()
Out[13]:
In [14]:
fd = {'gurla': gt, 'tib_dog':tdt} # fault dict
In [15]:
gt['features'][0]
Out[15]:
In [16]:
def get_row(row):
    """Fetch the geometric properties of this row's triangle.

    Uses the row's fault name to pick the right GeoJSON collection from
    the module-level `fd` dict, then pulls the triangle's center
    coordinates, orientation angles, and area from its properties.
    Returns a pandas Series so it can be used with DataFrame.apply(axis=1).
    """
    props = fd[row.fault_name]['features'][row.tri]['properties']
    east, north, depth = props['center']
    return pd.Series({'east': east,
                      'north': north,
                      'depth': depth,
                      'strike': props['strike'],
                      'dip': props['dip'],
                      'rake': props['rake'],
                      'area_sq_km': props['area_sq_km']})
In [17]:
get_row(tdg_avg.iloc[0])
Out[17]:
In [18]:
tdg_avg.apply(get_row, axis=1).head()
Out[18]:
In [19]:
# Attach the per-triangle geometric properties to the averaged stresses;
# both frames share the same integer index, so merge on the index.
tri_geom = tdg_avg.apply(get_row, axis=1)
tri_stress = pd.merge(tdg_avg, tri_geom,
                      left_index=True, right_index=True)
In [20]:
tri_stress.head()
Out[20]:
In [21]:
tri_stress.rename(columns={'tri':'point_index'}, inplace=True)
In [22]:
tri_stress = resolve_stresses(tri_stress)
In [23]:
tri_stress.shape
Out[23]:
In [24]:
ts = tri_stress.set_index(['fault_name', 'point_index'])
In [25]:
ts.head()
Out[25]:
In [26]:
def _attach_tri_stresses(features, fault_name):
    """Write the resolved topographic stresses for `fault_name` onto each
    triangle feature's GeoJSON properties, in feature order.

    Adds tau_d / tau_s (dip- and strike-slip shear), sigma_n (normal
    stress), the rake of the topographic shear vector, and the shear
    magnitude tau.  Mutates the feature dicts in place.
    """
    for i, feat in enumerate(features):
        stresses = ts.loc[(fault_name, i), :]
        props = feat['properties']
        props['tau_d'] = stresses['tau_dd']
        props['tau_s'] = stresses['tau_ss']
        props['sigma_n'] = stresses['sig_nn']
        props['topo_stress_rake'] = hsp.get_rake_from_shear_components(
            stresses['tau_dd'], stresses['tau_ss'])
        props['tau'] = np.sqrt(stresses['tau_dd']**2 + stresses['tau_ss']**2)

# FIX: the two faults were annotated by two verbatim copy-pasted loops;
# use one helper per fault instead (same order: tib_dog first).
_attach_tri_stresses(tdt['features'], 'tib_dog')
_attach_tri_stresses(gt['features'], 'gurla')
In [27]:
tri_stress = tri_stress[tri_stress.tau_dd != 0.]
In [28]:
tri_stress.shape
Out[28]:
In [29]:
tri_stress.slip_m = 0.1
In [30]:
tri_stress.to_csv('../data/fault_data/wnfs_tris.csv', index=False)
In [31]:
tdt['features'][0]
Out[31]:
In [32]:
# Persist the stress-annotated triangle meshes back to GeoJSON
# (tib_dog first, then gurla, matching the annotation order above).
annotated = [('../data/fault_data/tib_dog_tri_stresses.geojson', tdt),
             ('../data/fault_data/gurla_tri_stresses.geojson', gt)]
for out_path, collection in annotated:
    with open(out_path, 'w') as out_file:
        json.dump(collection, out_file)
In [33]:
import matplotlib.pyplot as plt
In [34]:
# Distribution of the (geologic) rake values over the tib_dog triangles.
tib_dog_rows = tri_stress[tri_stress['fault_name'] == 'tib_dog']
tib_dog_rows['rake'].hist(bins=20)
plt.show()
In [35]:
# Distribution of the dip-slip shear stress over the gurla triangles.
gurla_rows = tri_stress[tri_stress['fault_name'] == 'gurla']
gurla_rows['tau_dd'].hist(bins=20)
plt.show()
In [36]:
ts[['xz_stress', 'yz_stress']].describe()
Out[36]: