A few notes about this exercise:
In [10]:
import os
import psycopg2 as ppg
import numpy as np
import ast
from osgeo import ogr
import shapely as sp
from shapely.geometry import Point,Polygon,asShape
from shapely.wkt import loads as wkt_loads
from shapely import speedups
import cartopy as cp
import cartopy.crs as ccrs
import pandas as pd
import pandas.io.sql as pdsql
import geopandas as gp
from matplotlib.collections import PatchCollection
from mpl_toolkits.basemap import Basemap
import fiona
from descartes import PolygonPatch
import matplotlib.pyplot as plt
%matplotlib inline
In [11]:
# check whether shapely's compiled C speedups are present (attribute, not a call)
speedups.available
Out[11]:
In [12]:
# enable shapely's C speedups only when the compiled extension is actually
# available — calling enable() without it raises instead of degrading gracefully
if speedups.available:
    speedups.enable()
In [13]:
# psql equivalent: PGPASSWORD=<secret> psql -h 130.56.244.246 -d pcbm_pc -U pcbm
import getpass

# Never hardcode the database password in a shareable notebook: take it from
# the PGPASSWORD environment variable, falling back to an interactive prompt.
_pg_password = os.environ.get("PGPASSWORD") or getpass.getpass("pcbm DB password: ")
pg_connection = "dbname=pcbm_pc user=pcbm password={} host=130.56.244.246".format(_pg_password)
conn = ppg.connect(pg_connection)
cursor = conn.cursor()
In [14]:
#blocks_query = "SELECT pa::geometry(Polygon, 28355) AS geom, PC_PatchAvg(pa, 'Z') AS elevation, id FROM act_patches;"
# One row per patch: the patch envelope as WKT text (SRID 28355), the patch's
# mean Z, and its id.  st_astext keeps geom a plain text column so plain
# pandas read_sql can handle it without geometry support.
blocks_query = "SELECT st_astext(PC_envelope(pa)::geometry(Polygon, 28355)) AS geom, PC_PatchAvg(pa, 'Z') AS elevation, id FROM act8pt_16tiles_pc;"
%time blks = pdsql.read_sql(blocks_query, conn)
In [15]:
#how many patches did we get back? (one row per point-cloud patch)
len(blks)
Out[15]:
In [16]:
# rough total point count — assumes ~5000 points per patch; TODO confirm
# against the pointcloud schema's patch size
len(blks)*5000
Out[16]:
In [17]:
blocks_query = "SELECT pa::geometry(Polygon, 28355) AS geom, PC_PatchAvg(pa, 'Z') AS elevation, id FROM act8pt_16tiles_pc where PC_PatchAvg(pa, 'Z') > 600;"
%time thepatches = gp.read_postgis(blocks_query, conn)
In [18]:
# peek at the first few patches (geom, elevation, id)
thepatches.head()
Out[18]:
In [19]:
blocks_query = "SELECT pa::geometry(Polygon, 28355) AS geom, PC_PatchAvg(pa, 'Z') AS elevation, id FROM act8pt_16tiles_pc where PC_PatchAvg(pa, 'Z') > 800;"
%time highpatches = gp.read_postgis(blocks_query, conn)
In [20]:
# choropleth of the >800 m patches by mean elevation
# NOTE(review): newer geopandas expects cmap= instead of colormap= — confirm
# against the installed version
highpatches.plot(column='elevation',colormap='BrBG')
Out[20]:
In [21]:
# Explode every >800 m patch into its individual points and return each point
# as WKT text, so plain pandas can read the result.
points_query = "with pts as(SELECT PC_Explode(pa) as pt FROM act8pt_16tiles_pc where PC_PatchAvg(pa, 'Z') > 800 ) select st_astext(pt::geometry) from pts;"
#get raw point data, not as a geometry
#points_query = "SELECT PC_astext(PC_Explode(pa)) as pt FROM act_patches where PC_PatchAvg(pa, 'Z') > 700 ;"
%time pts = pdsql.read_sql(points_query, conn)
# point storage schema:
# 1 = intens, 2 = ReturnNo, 3 = Numreturns, 4 = scandirectionflag, 5 = edgeofflightline
# 6 = classification (ASPRS), 7 = scananglerank, 8 = user data, 9 = pointsourceID
# 10 = R, 11 = G, 12 = B, 13 = GPSTime, 14 = X, 15 = Y, 16 = Z
In [22]:
#how many points did we get? (.size is rows x cols; the frame has a single
# WKT column here so this equals the row count — len(pts) is the safer form)
pts.size
Out[22]:
In [23]:
#had to check the schema to find point order...
# pcid 4 identifies the schema used by these patches; schm.schema is attribute
# access to the 'schema' column — presumably the pointcloud XML schema
# documents; verify against the pointcloud_formats table
schema_query = "SELECT * FROM pointcloud_formats where pcid = 4;"
schm = pdsql.read_sql(schema_query, conn)
print(schm.schema)
In [24]:
# peek at the raw WKT text column
pts.head()
Out[24]:
In [25]:
# parse each WKT point string into an [x, y, z] triple
thepoints = [[geom.x, geom.y, geom.z] for geom in map(wkt_loads, pts.st_astext)]
In [26]:
# Convert the list of [x, y, z] lists to an (N, 3) array.  np.asarray keeps
# shape (1, 3) even for a single point, whereas np.squeeze would collapse it
# to (3,) and break the thepoints[:, 0] column indexing used below.
thepoints = np.asarray(thepoints)
In [27]:
# top-down scatter of the >800 m points, coloured by elevation (z)
plt.scatter(thepoints[:,0], thepoints[:,1], c = thepoints[:,2], lw=0, s=5, cmap='BrBG')
Out[27]:
In [28]:
# Points coloured by elevation with patch outlines overlaid:
# >600 m patches in blue, >800 m patches edged in red.
fig = plt.figure()
fig.set_size_inches(25/2.51, 25/2.51)  # roughly 25 cm square
BLUE = '#6699cc'
RED = '#cc6699'
ax = fig.gca()
ax.scatter(thepoints[:,0], thepoints[:,1], c = thepoints[:,2], lw=0, s=3, cmap='BrBG')
for patch in thepatches.geom:
    ax.add_patch(PolygonPatch(patch, fc=BLUE, ec=BLUE, alpha=0.2, zorder=2 ))
# bug fix: the original iterated thepatches twice, drawing every patch a second
# time; the red-edged pass should cover the high (>800 m) patches, matching the
# later overview figure
for patch in highpatches.geom:
    ax.add_patch(PolygonPatch(patch, fc=BLUE, ec=RED, alpha=0.2, zorder=2 ))
In [29]:
#ASPRS class 6 - buildings
# filter each >800 m patch down to its building-classified points, then
# explode the filtered patches into individual WKT points
bldng_query = "WITH filtered_patch AS (SELECT PC_FilterEquals(pa, 'Classification', 6) as f_patch FROM act8pt_16tiles_pc where PC_PatchAvg(pa, 'Z') > 800) SELECT st_astext(point::geometry) FROM filtered_patch, pc_explode(f_patch) AS point;"
%time bld_pts = pdsql.read_sql(bldng_query, conn)
In [30]:
# quick sanity peek (note: only a cell's final expression is displayed, so
# this head() result is silently discarded)
bld_pts.head()
# parse each building point's WKT into an [x, y, z] triple
bldpoints = [[g.x, g.y, g.z] for g in map(wkt_loads, bld_pts.st_astext)]
bldpoints = np.squeeze(bldpoints)
In [31]:
#ASPRS class 2 - ground
# same filter-then-explode pattern as the building query, for ground returns
grnd_query = "WITH filtered_patch AS (SELECT PC_FilterEquals(pa, 'Classification', 2) as f_patch FROM act8pt_16tiles_pc where PC_PatchAvg(pa, 'Z') > 800) SELECT st_astext(point::geometry) FROM filtered_patch, pc_explode(f_patch) AS point;"
%time grnd_pts = pdsql.read_sql(grnd_query, conn)
In [32]:
# sanity peek (discarded — not the cell's last expression)
grnd_pts.head()
# parse each ground point's WKT into an [x, y, z] triple
grndpoints = [[p.x, p.y, p.z] for p in map(wkt_loads, grnd_pts.st_astext)]
grndpoints = np.squeeze(grndpoints)
In [33]:
#ASPRS class 5 - high vegetation
# same filter-then-explode pattern, for high-vegetation returns
hv_query = "WITH filtered_patch AS (SELECT PC_FilterEquals(pa, 'Classification', 5) as f_patch FROM act8pt_16tiles_pc where PC_PatchAvg(pa, 'Z') > 800) SELECT st_astext(point::geometry) FROM filtered_patch, pc_explode(f_patch) AS point;"
%time hv_pts = pdsql.read_sql(hv_query, conn)
In [34]:
# sanity peek (discarded — not the cell's last expression)
hv_pts.head()
# parse each vegetation point's WKT into an [x, y, z] triple
hvpoints = [[v.x, v.y, v.z] for v in map(wkt_loads, hv_pts.st_astext)]
hvpoints = np.squeeze(hvpoints)
In [1]:
# Overview figure: ground / building / vegetation points coloured by height,
# with the >600 m patches outlined in blue and >800 m patches edged in red.
fig = plt.figure()
fig.set_size_inches(25/2.51, 25/2.51)
BLUE = '#6699cc'
RED = '#cc6699'
ax = fig.gca()
# one scatter layer per class, each with its own colormap (draw order matters:
# ground first, vegetation on top)
for xyz, cm in ((grndpoints, 'plasma'), (bldpoints, 'viridis'), (hvpoints, 'BrBG')):
    ax.scatter(xyz[:,0], xyz[:,1], c=xyz[:,2], lw=0, s=3, cmap=cm)
for geom in thepatches.geom:
    ax.add_patch(PolygonPatch(geom, fc=BLUE, ec=BLUE, alpha=0.2, zorder=2))
for geom in highpatches.geom:
    ax.add_patch(PolygonPatch(geom, fc=BLUE, ec=RED, alpha=0.2, zorder=2))
plt.savefig('act_16tile_overhead.png')
In [57]:
#set up for 3d plots
from mpl_toolkits.mplot3d import Axes3D
from mpl_toolkits.mplot3d.art3d import Poly3DCollection
import matplotlib.pylab as pylab
In [56]:
# 3-D view settings.  NOTE(review): plt_az/plt_elev/cb_fmt are defined but
# never used — presumably intended for ax0.view_init(...) and a colorbar;
# confirm before relying on the default view.
plt_az=300
plt_elev = 50.
plt_s = 2
cb_fmt = '%.1f'
fig = plt.figure()
fig.set_size_inches(30/2.51, 25/2.51)
ax0 = fig.add_subplot(111, projection='3d')
# pass the z columns directly as colour values; the original wrapped them in
# np.ndarray.tolist(...), which is redundant — scatter accepts arrays for c=
ax0.scatter(grndpoints[:,0], grndpoints[:,1], grndpoints[:,2], c=grndpoints[:,2],
            lw=0, s=plt_s, cmap='plasma')
ax0.scatter(bldpoints[:,0], bldpoints[:,1], bldpoints[:,2], c=bldpoints[:,2],
            lw=0, s=plt_s, cmap='viridis')
ax0.scatter(hvpoints[:,0], hvpoints[:,1], hvpoints[:,2], c=hvpoints[:,2],
            lw=0, s=plt_s-1, cmap='BrBG')
Out[56]:
In [ ]:
In [ ]:
In [ ]:
In [ ]:
In [31]:
# optional: persist the classified point sets as CSV for reuse outside the notebook
#np.savetxt('ground_points_800.txt', grndpoints, delimiter=',')
#np.savetxt('bld_points_800.txt', bldpoints, delimiter=',')
#np.savetxt('hv_points_800.txt', hvpoints, delimiter=',')
National elevation maps — storing and managing many billions of points as a coherent dataset for precise elevation estimation. We also aim to store provenance: if there is a data issue, we need more than just the points — we need to figure out why the issue occurred and fix it. We can also store attributes such as per-point accuracy, QC metrics, or any other point attributes we like — including points drawn from many different sources:
In [ ]: