In [1]:
import os
import psycopg2 as ppg
import numpy as np
import ast
from osgeo import ogr
import shapely as sp
from shapely.geometry import Point,Polygon,asShape
from shapely.wkt import loads as wkt_loads
from shapely import speedups
import cartopy as cp
import cartopy.crs as ccrs
import pandas as pd
import pandas.io.sql as pdsql
import geopandas as gp
from matplotlib.collections import PatchCollection
from mpl_toolkits.basemap import Basemap
import fiona
from descartes import PolygonPatch
import matplotlib.pyplot as plt
%matplotlib inline
In [2]:
# True when this shapely build ships the compiled C speedups.
speedups.available
Out[2]:
In [3]:
# Switch on shapely's C-accelerated geometry ops (helps the WKT parsing below).
speedups.enable()
In [4]:
# Connect to the PostGIS/pointcloud database.
# Shell equivalent: PGPASSWORD=... psql -h localhost -d pcbm_pc -U pcbm
# SECURITY: don't hardcode the password in the notebook — read it from the
# environment.  The old literal is kept only as a fallback so existing runs
# keep working; remove it once PCBM_PG_PASSWORD is set everywhere.
pg_password = os.environ.get('PCBM_PG_PASSWORD', 'pointy_cloudy')
pg_connection = "dbname=pcbm_pc user=pcbm password={} host=130.56.244.246".format(pg_password)
conn = ppg.connect(pg_connection)
cursor = conn.cursor()
In [5]:
# Overall bounding box of the point-cloud table, as a single polygon (SRID 28355).
bbox_query = 'SELECT ST_Extent(pa)::geometry(Polygon, 28355) as table_extent FROM ga_ld_pc;'
%time bbox = gp.read_postgis(bbox_query, conn)
In [6]:
# Earlier query variants kept for reference (different table / WKT-text output).
#blocks_query = "SELECT pa::geometry(Polygon, 28355) AS geom, PC_PatchAvg(pa, 'Z') AS elevation, id FROM act_patches;"
#blocks_query = "SELECT st_astext(PC_envelope(pa)::geometry(Polygon, 28355)) AS geom, PC_PatchAvg(pa, 'Z') AS elevation, id FROM ga_ld_pc;"
# Envelope polygon and mean elevation for a 1000-patch sample of the table.
blocks_query = "select PC_envelope(pa)::geometry(Polygon, 28355) AS geom, PC_PatchAvg(pa, 'Z') AS elevation from ga_ld_pc limit 1000;"
%time blocks = gp.read_postgis(blocks_query, conn)
In [7]:
# Total number of cells (rows x columns) in the returned frame.
blocks.size
Out[7]:
In [8]:
# Preview the first few patch envelopes and their mean elevations.
blocks.head()
Out[8]:
In [9]:
# Wrap the patch envelopes as a GeoSeries and pair each with its mean
# elevation in a GeoDataFrame.
footprints = gp.GeoSeries(blocks.geom)
patches = gp.GeoDataFrame(data={'geometry': footprints, 'elevation': blocks.elevation})
In [10]:
# Sanity-check the assembled GeoDataFrame.
patches.head()
Out[10]:
In [ ]:
# No LIMIT this time: every patch (cast to its polygon, with id) whose mean
# elevation exceeds 400.
blocks_query = "SELECT pa::geometry(Polygon, 28355) AS geom, PC_PatchAvg(pa, 'Z') AS elevation, id FROM ga_ld_pc where PC_PatchAvg(pa, 'Z') > 400;"
%time thepatches = gp.read_postgis(blocks_query, conn)
In [ ]:
# Preview the >400 patches.
thepatches.head()
In [ ]:
# Client-side filter: keep only the patches averaging above 820.
%time highpatches = thepatches.query('elevation > 820')
highpatches.head()
In [ ]:
# Choropleth of the high patches, coloured by mean elevation.
# NOTE(review): newer geopandas renamed colormap= to cmap= — confirm the
# installed version still accepts colormap.
highpatches.plot(column='elevation',colormap='BrBG')
In [ ]:
# Explode every patch averaging above 820 into individual points, returned
# as WKT text via plain pandas (no geometry column needed client-side).
points_query = "with pts as(SELECT PC_Explode(pa) as pt FROM act_patches where PC_PatchAvg(pa, 'Z') > 820 ) select st_astext(pt::geometry) from pts;"
#get raw point data, not as a geometry
#points_query = "SELECT PC_astext(PC_Explode(pa)) as pt FROM act_patches where PC_PatchAvg(pa, 'Z') > 700 ;"
%time pts = pdsql.read_sql(points_query, conn)
# point storage schema:
# 1 = intens, 2 = ReturnNo, 3 = Numreturns, 4 = scandirectionflag, 5 = edgeofflightline
# 6 = classification (ASPRS), 7 = scananglerank, 8 = user data, 9 = pointsourceID
# 10 = R, 11 = G, 12 = B, 13 = GPSTime, 14 = X, 15 = Y, 16 = Z
In [ ]:
#how many points did we get?
#how many points did we get?
# (DataFrame.size = rows x columns; with a single column this is the row count)
pts.size
In [ ]:
#had to check the schema to find point order...
#had to check the schema to find point order...
# Fetch the stored format definition for pcid 4 and print its schema column.
schema_query = "SELECT * FROM pointcloud_formats where pcid = 4;"
schm = pdsql.read_sql(schema_query, conn)
print(schm.schema)
In [ ]:
# Peek at the returned WKT point strings.
pts.head()
In [ ]:
# Parse each WKT "POINT Z" string into an [x, y, z] coordinate triple.
thepoints = [
    [geom.x, geom.y, geom.z]
    for geom in (wkt_loads(wkt) for wkt in pts.st_astext)
]
In [ ]:
# Convert to an (n, 3) float array.  np.asarray keeps the array 2-D even if
# only one point was returned; np.squeeze would collapse that case to shape
# (3,) and break the [:, k] column indexing used in the plotting cells.
thepoints = np.asarray(thepoints, dtype=float)
In [ ]:
# 2-D scatter of the high-elevation points, coloured by their z value.
plt.scatter(thepoints[:,0], thepoints[:,1], c = thepoints[:,2], lw=0, s=5, cmap='BrBG')
In [ ]:
# Overview figure: points coloured by elevation with patch envelopes overlaid
# (blue edges = all >400 patches, red edges = the >820 subset).
fig = plt.figure()
fig.set_size_inches(25/2.51, 25/2.51)   # roughly 25 cm square (cm-to-inch; 2.54 is the exact factor)
BLUE = '#6699cc'
RED = '#cc6699'
ax = fig.gca()
ax.scatter(thepoints[:,0], thepoints[:,1], c = thepoints[:,2], lw=0, s=3, cmap='BrBG')
for patch in thepatches.geom:
    ax.add_patch(PolygonPatch(patch, fc=BLUE, ec=BLUE, alpha=0.2, zorder=2 ))
for patch in highpatches.geom:
    ax.add_patch(PolygonPatch(patch, fc=BLUE, ec=RED, alpha=0.2, zorder=2 ))
In [ ]:
#ASPRS class 6 - buildings
bldng_query = "WITH filtered_patch AS (SELECT PC_FilterEquals(pa, 'Classification', 6) as f_patch FROM act_patches where PC_PatchAvg(pa, 'Z') > 820) SELECT st_astext(point::geometry) FROM filtered_patch, pc_explode(f_patch) AS point;"
%time bld_pts = pdsql.read_sql(bldng_query, conn)
In [ ]:
# Preview the building-class WKT strings.  NOTE: this is not the cell's last
# expression, so it produces no visible output here.
bld_pts.head()
# Parse each WKT "POINT Z" into an (n, 3) float array.  np.asarray (rather
# than np.squeeze) keeps the result 2-D even when a single point is returned,
# so the [:, k] column indexing in the plotting cells cannot break.
bldpoints = [[geom.x, geom.y, geom.z] for geom in map(wkt_loads, bld_pts.st_astext)]
bldpoints = np.asarray(bldpoints, dtype=float)
In [ ]:
#ASPRS class 2 - ground
grnd_query = "WITH filtered_patch AS (SELECT PC_FilterEquals(pa, 'Classification', 2) as f_patch FROM act_patches where PC_PatchAvg(pa, 'Z') > 820) SELECT st_astext(point::geometry) FROM filtered_patch, pc_explode(f_patch) AS point;"
%time grnd_pts = pdsql.read_sql(grnd_query, conn)
In [ ]:
# Preview the ground-class WKT strings.  NOTE: this is not the cell's last
# expression, so it produces no visible output here.
grnd_pts.head()
# Parse each WKT "POINT Z" into an (n, 3) float array.  np.asarray (rather
# than np.squeeze) keeps the result 2-D even when a single point is returned,
# so the [:, k] column indexing in the plotting cells cannot break.
grndpoints = [[geom.x, geom.y, geom.z] for geom in map(wkt_loads, grnd_pts.st_astext)]
grndpoints = np.asarray(grndpoints, dtype=float)
In [ ]:
#ASPRS class 5 - high vegetation
hv_query = "WITH filtered_patch AS (SELECT PC_FilterEquals(pa, 'Classification', 5) as f_patch FROM ga_ld_pc where PC_PatchAvg(pa, 'Z') > 40) SELECT st_astext(point::geometry) FROM filtered_patch, pc_explode(f_patch) AS point;"
%time hv_pts = pdsql.read_sql(hv_query, conn)
In [ ]:
#ASPRS class 5 - high vegetation
hv_query = "WITH filtered_patch AS (SELECT PC_FilterEquals(pa, 'Classification', 5) as f_patch FROM ga_ld_pc where PC_Intersection(pa, 'SRID=4326;POLYGON((-126.451 45.552, -126.42 47.55, -126.40 45.552, -126.451 45.552))'::geometry
) SELECT st_astext(point::geometry) FROM filtered_patch, pc_explode(f_patch) AS point;"
%time hv_pts = pdsql.read_sql(hv_query, conn)
In [ ]:
# Preview the vegetation-class WKT strings.  NOTE: this is not the cell's
# last expression, so it produces no visible output here.
hv_pts.head()
# Parse each WKT "POINT Z" into an (n, 3) float array.  np.asarray (rather
# than np.squeeze) keeps the result 2-D even when a single point is returned,
# so the [:, k] column indexing in the plotting cells cannot break.
hvpoints = [[geom.x, geom.y, geom.z] for geom in map(wkt_loads, hv_pts.st_astext)]
hvpoints = np.asarray(hvpoints, dtype=float)
In [ ]:
# Combined figure: ground / building / vegetation points, each in its own
# colour map, with patch envelopes overlaid.
fig = plt.figure()
fig.set_size_inches(25/2.51, 25/2.51)   # roughly 25 cm square
BLUE = '#6699cc'
RED = '#cc6699'
ax = fig.gca()
ax.scatter(grndpoints[:,0], grndpoints[:,1], c = grndpoints[:,2], lw=0, s=3, cmap='plasma')
ax.scatter(bldpoints[:,0], bldpoints[:,1], c = bldpoints[:,2], lw=0, s=3, cmap='viridis')
ax.scatter(hvpoints[:,0], hvpoints[:,1], c = hvpoints[:,2], lw=0, s=3, cmap='BrBG')
# Patch outlines: all queried patches in blue, the >820 subset edged in red.
for patch in thepatches.geom:
    ax.add_patch(PolygonPatch(patch, fc=BLUE, ec=BLUE, alpha=0.2, zorder=2 ))
for patch in highpatches.geom:
    ax.add_patch(PolygonPatch(patch, fc=BLUE, ec=RED, alpha=0.2, zorder=2 ))
In [ ]:
#set up for 3d plots
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pylab as pylab
In [ ]:
# 3-D scatter of ground, building, and vegetation points, coloured by elevation.
plt_az = 300        # camera azimuth, degrees
plt_elev = 50.      # camera elevation, degrees
plt_s = 2           # marker size
cb_fmt = '%.1f'     # colourbar tick format — currently unused (no colourbar drawn)
fig = plt.figure()
fig.set_size_inches(30/2.51, 25/2.51)
ax0 = fig.add_subplot(111, projection='3d')
# Apply the camera settings above — they were defined but never used before.
ax0.view_init(elev=plt_elev, azim=plt_az)
# Pass the z column directly as the colour array: matplotlib accepts ndarrays,
# so the np.ndarray.tolist() round-trip was unnecessary.
ax0.scatter(grndpoints[:, 0], grndpoints[:, 1], grndpoints[:, 2],
            c=grndpoints[:, 2], lw=0, s=plt_s, cmap='plasma')
ax0.scatter(bldpoints[:, 0], bldpoints[:, 1], bldpoints[:, 2],
            c=bldpoints[:, 2], lw=0, s=plt_s, cmap='viridis')
ax0.scatter(hvpoints[:, 0], hvpoints[:, 1], hvpoints[:, 2],
            c=hvpoints[:, 2], lw=0, s=plt_s - 1, cmap='BrBG')
In [ ]:
import vtk
In [ ]:
In [ ]:
In [ ]:
In [ ]:
In [ ]:
# Dump each classified point set to a comma-separated text file for use
# outside the notebook.
exports = [
    ('ground_points.txt', grndpoints),
    ('bld_points.txt', bldpoints),
    ('hv_points.txt', hvpoints),
]
for fname, arr in exports:
    np.savetxt(fname, arr, delimiter=',')
National elevation maps — storing and managing many billions of points as a coherent dataset for precise elevation estimation. We also aim to store provenance: if a data issue arises, we need more than just the points — we need to work out why the issue occurred and fix it. We can also store attributes such as point accuracy, QC metrics, or any other per-point data we like, including points drawn from multiple sources.
In [ ]: