In [19]:
# Third-party imports: geopandas/shapely for geometry handling, pandas for
# tabular work, matplotlib/seaborn for plotting.
import geopandas as gp
import pandas as pd
import matplotlib  # NOTE(review): only plt is used below — candidate for removal
import matplotlib.pyplot as plt
import seaborn as sb  # NOTE(review): sb is never used in this notebook
from shapely.geometry import Polygon
import os

# Tall default figure size so the map plots render legibly.
plt.rcParams['figure.figsize'] = (10, 20)
%matplotlib inline

In [20]:
# Avalanche-detection shapefile produced from the 2019-04-24 Sentinel-1 scene
# (reference scene 2019-03-13, track 087, VV polarisation).
base_file = (
    "../data/AvalDet_20190424_155109_ref_20190313_trno_087_VV/"
    "AvalDet_20190424_155109_ref_20190313_trno_087_VV.shp"
)

# Read the detections and discard the record at index 1, keeping a single
# polygon to serve as the "original" avalanche for the test scenarios.
gdf = gp.read_file(base_file).drop(index=1)
gdf.head()


Out[20]:
area aspect det_count east length north raster_val refdate sat_geom source ... vh1_max vv0_mean vv0_median vv0_min vv0_max vh0_mean vh0_median vh0_min vh0_max geometry
0 10394.8943 None 1 20.944515 793.557988 69.714452 11.0 2019-03-13 15:50:19.000220 87 AvalDet_20190424_155109_ref_20190313_trno_087_... ... -15.749162 -10.2966 -10.15564 -13.197838 -7.651885 -19.937243 -20.155186 -22.938601 -16.017008 POLYGON ((20.94465714165575 69.71382402696912,...

1 rows × 45 columns


In [21]:
# List every attribute column delivered with the detection shapefile.
gdf.columns


Out[21]:
Index(['area', 'aspect', 'det_count', 'east', 'length', 'north', 'raster_val',
       'refdate', 'sat_geom', 'source', 't_0', 't_1', 'time', 'track_id',
       'uuid', 'width', 'dem_mean', 'dem_median', 'dem_min', 'dem_max',
       'slp_mean', 'slp_median', 'slp_min', 'slp_max', 'asp_mean',
       'asp_median', 'asp_min', 'asp_max', 'vv1_mean', 'vv1_median', 'vv1_min',
       'vv1_max', 'vh1_mean', 'vh1_median', 'vh1_min', 'vh1_max', 'vv0_mean',
       'vv0_median', 'vv0_min', 'vv0_max', 'vh0_mean', 'vh0_median', 'vh0_min',
       'vh0_max', 'geometry'],
      dtype='object')

The following columns are used by the import routine:

  • time -> skredTidspunkt (should probably be t_1)
  • time -> registrertDato (should probably be t_1)
  • geometry -> SHAPE

  • dem_min -> hoydeStoppSkred_moh

  • asp_median -> eksposisjonUtlopsomr
  • slp_mean -> snittHelningUtlopsomr_gr
  • slp_max -> maksHelningUtlopsomr_gr
  • slp_min -> minHelningUtlopsomr_gr
  • area -> arealUtlopsomr_m2

Important: skredID needs to be the same in all tables.


In [22]:
# Columns the downstream import routine consumes (see the mapping above).
import_list = ['time', 'area', 'dem_min', 'asp_median', 'slp_mean', 'slp_max', 'slp_min', 'geometry']

In [23]:
# Add a descriptive name to the polygon (underscore prefix marks helper
# columns that are dropped again before the shapefiles are written).
gdf['_name'] = "Original"

# Parse t_0 (the time the reference image was taken) into a datetime object
# and store it under a new, descriptive column. NOTE(review): an earlier
# comment claimed this overwrites 'refdate' — it does not; it creates
# '_reference_date', and 'refdate' (a duplicate of t_0) is left untouched.
gdf['_reference_date'] = pd.to_datetime(gdf['t_0'])

# Parse t_1 (the time the activity image was taken) into a datetime object
# and give it a descriptive name.
gdf['_detection_date'] = pd.to_datetime(gdf['t_1'])

In [24]:
# Choropleth of the detection polygon(s), coloured by area (darker = larger).
aval_map = gdf.plot(column="area", linewidth=0.3, edgecolor='black', cmap="OrRd", alpha=0.9)



In [25]:
# Inspect the raw shapely geometry of the remaining detection polygon
# (index label 0 is the only row left after the drop above).
plg = gdf['geometry'][0]
print(type(plg), plg)


<class 'shapely.geometry.polygon.Polygon'> POLYGON ((20.94465714165575 69.71382402696912, 20.94470735316531 69.71400241245273, 20.94419325473413 69.71401983461618, 20.9442434628668 69.7141982202462, 20.94372935925285 69.71421564107656, 20.94270114931648 69.71425047823899, 20.94285174061144 69.71478563650619, 20.94387997622828 69.71475079834599, 20.94393018369257 69.71492918406263, 20.94547254318632 69.71487691507734, 20.94598666121134 69.71485948908331, 20.94573554731867 69.71396756362762, 20.94522145069351 69.71398498878987, 20.94517123490418 69.71380660347262, 20.94465714165575 69.71382402696912))

In [26]:
# Define the base coordinates for the test polygons
e_base = 20.94  # easting (longitude) origin, EPSG:4326
n_base= 69.71  # northing (latitude) origin, EPSG:4326

scn = []  # collects the synthetic scenario GeoDataFrames built below

In [ ]:
# Scenario 1: synthetic 0.002 x 0.002 degree rectangle offset from the base point.
east_min = e_base + 0.005
east_max = east_min + 0.002
north_min = n_base + 0.005
north_max = north_min + 0.002
p1 = Polygon([(east_min, north_min), (east_max, north_min),
              (east_max, north_max), (east_min, north_max)])

# Create a copy of the original polygon and alter its properties
scn.append(gdf.copy(deep=True))
scn[0]['_name'] = "Scenario 1"
scn[0]['geometry'] = p1
print(scn[0].crs)
# Reproject to UTM 33N (metric) so .area yields m^2, then reproject back.
# "EPSG:32633" replaces the {'init': ...} dict deprecated by pyproj 2, and
# reassignment replaces to_crs(..., inplace=True), which newer geopandas
# releases no longer accept.
scn[0] = scn[0].to_crs("EPSG:32633")
print(scn[0].crs)
scn[0]['area'] = scn[0]['geometry'].area
scn[0] = scn[0].to_crs(gdf.crs)

# Attribute values consumed by the import routine (see mapping above).
scn[0]['dem_min'] = 800
scn[0]['asp_median'] = 180
scn[0]['slp_mean'] = 9.0
scn[0]['slp_max'] = 12.0
scn[0]['slp_min'] = 4.0
scn[0]['time'] = '2019-04-27T15:51:09.025897'

scn[0]['_dis_id'] = 1  # used only to merge (dissolve) polygons for verification

In [28]:
# Sanity check: show only the columns the import routine will read.
scn[0].filter(import_list).head()


Out[28]:
time area dem_min asp_median slp_mean slp_max slp_min geometry
0 2019-04-27T15:51:09.025897 17280.273674 800 180 9.0 12.0 4.0 POLYGON ((20.945 69.71499999999999, 20.947 69....

In [29]:
# Scenario 2: rectangle shifted slightly east and south relative to scenario 1.
east_min = e_base + 0.006
east_max = east_min + 0.002
north_min = n_base + 0.004
north_max = north_min + 0.002
p2 = Polygon([(east_min, north_min), (east_max, north_min),
              (east_max, north_max), (east_min, north_max)])

# Create a copy of the original polygon and alter its properties
scn.append(gdf.copy(deep=True))
scn[1]['_name'] = "Scenario 2"
scn[1]['geometry'] = p2
# Metric area via UTM 33N, then back to the source CRS. "EPSG:32633" replaces
# the deprecated {'init': ...} dict; reassignment replaces the removed
# inplace=True option of GeoDataFrame.to_crs.
scn[1] = scn[1].to_crs("EPSG:32633")
scn[1]['area'] = scn[1]['geometry'].area
scn[1] = scn[1].to_crs(gdf.crs)

# Attribute values consumed by the import routine (see mapping above).
scn[1]['dem_min'] = 820
scn[1]['asp_median'] = 200
scn[1]['slp_mean'] = 10.0
scn[1]['slp_max'] = 15.0
scn[1]['slp_min'] = 6.0
scn[1]['time'] = '2019-04-28T15:51:09.025897'

scn[1]['_dis_id'] = 1  # used only to merge (dissolve) polygons for verification

In [30]:
# Scenario 3: rectangle overlapping the gap between scenarios 1 and 2.
east_min = e_base + 0.0055
east_max = east_min + 0.002
north_min = n_base + 0.0055
north_max = north_min + 0.002
p3 = Polygon([(east_min, north_min), (east_max, north_min),
              (east_max, north_max), (east_min, north_max)])

# Create a copy of the original polygon and alter its properties
scn.append(gdf.copy(deep=True))
scn[2]['_name'] = "Scenario 3"
scn[2]['geometry'] = p3
# Metric area via UTM 33N, then back to the source CRS. "EPSG:32633" replaces
# the deprecated {'init': ...} dict; reassignment replaces the removed
# inplace=True option of GeoDataFrame.to_crs.
scn[2] = scn[2].to_crs("EPSG:32633")
scn[2]['area'] = scn[2]['geometry'].area
scn[2] = scn[2].to_crs(gdf.crs)

# Attribute values consumed by the import routine (see mapping above).
scn[2]['dem_min'] = 850
scn[2]['asp_median'] = 205
scn[2]['slp_mean'] = 9.0
scn[2]['slp_max'] = 13.0
scn[2]['slp_min'] = 5.0
scn[2]['time'] = '2019-05-12T15:51:09.025897'

scn[2]['_dis_id'] = 1  # used only to merge (dissolve) polygons for verification

In [31]:
# Scenario 4: larger (0.003-degree) rectangle further north-east.
east_min = e_base + 0.007
east_max = east_min + 0.003
north_min = n_base + 0.008
north_max = north_min + 0.003
p4 = Polygon([(east_min, north_min), (east_max, north_min),
              (east_max, north_max), (east_min, north_max)])

# Create a copy of the original polygon and alter its properties
scn.append(gdf.copy(deep=True))
scn[3]['_name'] = "Scenario 4"
scn[3]['geometry'] = p4
# Metric area via UTM 33N, then back to the source CRS. "EPSG:32633" replaces
# the deprecated {'init': ...} dict; reassignment replaces the removed
# inplace=True option of GeoDataFrame.to_crs.
scn[3] = scn[3].to_crs("EPSG:32633")
scn[3]['area'] = scn[3]['geometry'].area
scn[3] = scn[3].to_crs(gdf.crs)

# Attribute values consumed by the import routine (see mapping above).
# NOTE(review): slp_min (5.2) exceeds slp_mean (3.0) here — looks like a
# deliberate bad-data scenario, or a typo; confirm with the author.
scn[3]['dem_min'] = 820
scn[3]['asp_median'] = 105
scn[3]['slp_mean'] = 3.0
scn[3]['slp_max'] = 8.0
scn[3]['slp_min'] = 5.2
scn[3]['time'] = '2019-04-28T15:51:09.025897'

scn[3]['_dis_id'] = 1  # used only to merge (dissolve) polygons for verification

In [32]:
# Scenario 5: tall rectangle (0.002 x 0.005 degrees) overlapping scenario 2's footprint.
east_min = e_base + 0.006
east_max = east_min + 0.002
north_min = n_base + 0.006
north_max = north_min + 0.005
p5 = Polygon([(east_min, north_min), (east_max, north_min),
              (east_max, north_max), (east_min, north_max)])

# Create a copy of the original polygon and alter its properties
scn.append(gdf.copy(deep=True))
scn[4]['_name'] = "Scenario 5"
scn[4]['geometry'] = p5
# Metric area via UTM 33N, then back to the source CRS. "EPSG:32633" replaces
# the deprecated {'init': ...} dict; reassignment replaces the removed
# inplace=True option of GeoDataFrame.to_crs.
scn[4] = scn[4].to_crs("EPSG:32633")
scn[4]['area'] = scn[4]['geometry'].area
scn[4] = scn[4].to_crs(gdf.crs)

# Attribute values consumed by the import routine (see mapping above).
scn[4]['dem_min'] = 820
scn[4]['asp_median'] = 105
scn[4]['slp_mean'] = 3.0
scn[4]['slp_max'] = 8.0
scn[4]['slp_min'] = 5.2
scn[4]['time'] = '2019-04-28T15:51:09.025897'

scn[4]['_dis_id'] = 1  # used only to merge (dissolve) polygons for verification

In [33]:
# Overlay all five synthetic scenario polygons on the original detection.
# A loop replaces five copy-pasted .plot(...) lines; ending the cell with
# `ax` keeps the same AxesSubplot repr as the original output.
ax = gdf.plot()
for scenario in scn:
    scenario.plot(ax=ax, edgecolor='black', alpha=0.6)
ax


Out[33]:
<matplotlib.axes._subplots.AxesSubplot at 0x29d78cac860>

In [34]:
# Quick side-by-side check: name, computed area, and CRS for the original
# polygon and the first two scenarios (areas should be in m^2, CRS restored
# to the source EPSG:4326).
for frame in (gdf, scn[0], scn[1]):
    print(frame['_name'], frame['area'], frame.crs)


0    Original
Name: _name, dtype: object 0    10394.8943
Name: area, dtype: float64 {'init': 'epsg:4326'}
0    Scenario 1
Name: _name, dtype: object 0    17280.273674
Name: area, dtype: float64 {'init': 'epsg:4326'}
0    Scenario 2
Name: _name, dtype: object 0    17281.096651
Name: area, dtype: float64 {'init': 'epsg:4326'}

In [35]:
# Persist all scenarios into a single shapefile and build a dissolved
# (merged) version for verification.
test_scns = pd.concat(scn)  # all five scenario frames, not an enumerated list
new_dir = '../data/scns'
# makedirs(exist_ok=True) is race-free, unlike exists() followed by mkdir().
os.makedirs(new_dir, exist_ok=True)
# Helper columns (underscore-prefixed, plus datetime columns shapefiles
# cannot store) are dropped before writing.
test_scns.drop(['_detection_date', '_reference_date', '_dis_id'], axis=1).to_file(filename='../data/scns/test_scns.shp')

dissolved_scns = test_scns.dissolve(by='_dis_id')


C:\Anaconda3\envs\APS\lib\site-packages\geopandas\io\file.py:108: FionaDeprecationWarning: Use fiona.Env() instead.
  with fiona.drivers():

In [36]:
# Write each scenario to its own shapefile under ../data/scn_<i>/.
# enumerate(scn) replaces the hard-coded range(5); makedirs(exist_ok=True)
# replaces the racy exists()+mkdir; the original .format(i, i) passed an
# unused second argument since the template only references {0}.
for i, scenario in enumerate(scn):
    new_dir = '../data/scn_{0}'.format(i)
    os.makedirs(new_dir, exist_ok=True)
    # Drop helper columns that are not part of the shapefile schema.
    scenario.drop(['_detection_date', '_reference_date', '_dis_id'], axis=1).to_file(
        filename='{0}/scn_{1}.shp'.format(new_dir, i))


C:\Anaconda3\envs\APS\lib\site-packages\geopandas\io\file.py:108: FionaDeprecationWarning: Use fiona.Env() instead.
  with fiona.drivers():
C:\Anaconda3\envs\APS\lib\site-packages\geopandas\io\file.py:108: FionaDeprecationWarning: Use fiona.Env() instead.
  with fiona.drivers():
C:\Anaconda3\envs\APS\lib\site-packages\geopandas\io\file.py:108: FionaDeprecationWarning: Use fiona.Env() instead.
  with fiona.drivers():
C:\Anaconda3\envs\APS\lib\site-packages\geopandas\io\file.py:108: FionaDeprecationWarning: Use fiona.Env() instead.
  with fiona.drivers():
C:\Anaconda3\envs\APS\lib\site-packages\geopandas\io\file.py:108: FionaDeprecationWarning: Use fiona.Env() instead.
  with fiona.drivers():