IOOS System Test: Extreme Events Theme: Coastal Inundation
This notebook is based on IOOS System Test: Inundation
Methodology:
- Define a bounding box and temporal range of interest
- Build FES filters and query the NGDC CSW catalog for matching records
- Extract OPeNDAP (DAP) and SOS service endpoints from the returned records
- Retrieve current observations from the CO-OPS SOS for stations inside the bounding box
- Plot observed current speed and direction for each station
In [1]:
%matplotlib inline

import datetime as dt
from warnings import warn

import folium
from IPython.display import HTML
import iris
from iris.exceptions import CoordinateNotFoundError, ConstraintMismatchError
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
import numpy as np
from owslib.csw import CatalogueServiceWeb
from owslib import fes
import pandas as pd
from pyoos.collectors.coops.coops_sos import CoopsSos
import requests

from utilities import (fes_date_filter, coops2df, find_timevar, find_ij, nearxy,
                       service_urls, mod_df, get_coordinates, inline_map,
                       get_Coops_longName)
In [2]:
bounding_box_type = "box"
# Bounding Box [lon_min, lat_min, lon_max, lat_max]
area = {'Hawaii': [-160.0, 18.0, -154., 23.0],
'Gulf of Maine': [-72.0, 41.0, -69.0, 43.0],
'New York harbor region': [-75., 39., -71., 41.5],
'Puerto Rico': [-75, 12, -55, 26],
'East Coast': [-77, 36, -73, 38],
'North West': [-130, 38, -121, 50]}
bounding_box = area['East Coast']
# Temporal range: the last 4 days, up to the current hour
jd_now = dt.datetime.utcnow()
jd_start, jd_stop = jd_now - dt.timedelta(days=4), jd_now #+ dt.timedelta(days=3)
start_date = jd_start.strftime('%Y-%m-%d %H:00')
stop_date = jd_stop.strftime('%Y-%m-%d %H:00')
jd_start = dt.datetime.strptime(start_date, '%Y-%m-%d %H:%M')
jd_stop = dt.datetime.strptime(stop_date, '%Y-%m-%d %H:%M')
print start_date,'to',stop_date
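folium is imported above but not otherwise used in this section, so the following is only a quick visual check that the chosen bounding box is where you expect it. It is a minimal sketch against folium's current Map/PolyLine API; inline_map is the helper imported from utilities and is assumed to embed the map in the notebook.

# Sketch (optional): draw the selected bounding box on a folium map
lon_min, lat_min, lon_max, lat_max = bounding_box
corners = [[lat_min, lon_min], [lat_min, lon_max],
           [lat_max, lon_max], [lat_max, lon_min], [lat_min, lon_min]]
m = folium.Map(location=[(lat_min + lat_max) / 2.0, (lon_min + lon_max) / 2.0], zoom_start=6)
folium.PolyLine(corners, color='red', weight=2).add_to(m)
inline_map(m)  # assumed to return an inline HTML rendering of the map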
In [3]:
# Put the variable names and SOS name in a dict for ease of access
data_dict = {}
sos_name = 'Currents'
data_dict['currents'] = {
    "u_names": ['eastward_sea_water_velocity_assuming_no_tide',
                'surface_eastward_sea_water_velocity',
                '*surface_eastward_sea_water_velocity*',
                'eastward_sea_water_velocity'],
    "v_names": ['northward_sea_water_velocity_assuming_no_tide',
                'surface_northward_sea_water_velocity',
                '*surface_northward_sea_water_velocity*',
                'northward_sea_water_velocity'],
    "sos_name": ['currents']}
In [4]:
# endpoint = 'http://geo.gov.ckan.org/csw' # data.gov
# endpoint = 'https://data.noaa.gov/csw' # data.noaa.gov
# endpoint = 'http://www.nodc.noaa.gov/geoportal/csw' # nodc
endpoint = 'http://www.ngdc.noaa.gov/geoportal/csw' # NGDC Geoportal
csw = CatalogueServiceWeb(endpoint, timeout=60)
for oper in csw.operations:
    if oper.name == 'GetRecords':
        cnstr = oper.constraints['SupportedISOQueryables']['values']
        print('\nISO Queryables:%s\n' % '\n'.join(cnstr))
In [5]:
# Convert user input into FES filters
start, stop = fes_date_filter(start_date, stop_date)
bbox = fes.BBox(bounding_box)

# Use the u-component variable names to build an OR'd search filter
or_filt = fes.Or([fes.PropertyIsLike(propertyname='apiso:AnyText',
                                     literal=('*%s*' % val),
                                     escapeChar='\\', wildCard='*', singleChar='?')
                  for val in data_dict['currents']['u_names']])

# Exclude records whose text mentions 'Averages'
val = 'Averages'
not_filt = fes.Not([fes.PropertyIsLike(propertyname='apiso:AnyText',
                                       literal=('*%s*' % val),
                                       escapeChar='\\', wildCard='*', singleChar='?')])

filter_list = [fes.And([bbox, start, stop, or_filt, not_filt])]
# Query the CSW with the combined filter list
# (multiple filters are combined with "and" syntax: [[filter1, filter2]])
csw.getrecords2(constraints=filter_list, maxrecords=1000, esn='full')
print str(len(csw.records)) + " CSW records found"
for rec, item in csw.records.items():
    print(item.title)
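Each returned record carries a list of references pointing at its service endpoints (OPeNDAP, SOS, WMS, ...); the service_urls helper used below presumably filters these by scheme. A small sketch for inspecting them on one record, assuming OWSLib's usual CSW record structure:

# Sketch (optional): look at the service references attached to the first record
# OWSLib CSW records expose `references` as a list of {'scheme': ..., 'url': ...} dicts
for rec, item in list(csw.records.items())[:1]:
    for ref in item.references:
        print ref.get('scheme'), '->', ref.get('url')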
DAP URLs
In [6]:
dap_urls = service_urls(csw.records)
# Remove duplicates and sort
dap_urls = sorted(set(dap_urls))
print "\n".join(dap_urls)
SOS URLs
In [7]:
sos_urls = service_urls(csw.records,service='sos:url')
# Add the known NDBC SOS endpoint
sos_urls.append("http://sdf.ndbc.noaa.gov/sos/server.php") #?request=GetCapabilities&service=SOS
sos_urls = sorted(set(sos_urls))
print "Total SOS:",len(sos_urls)
print "\n".join(sos_urls)
In [8]:
start_time = dt.datetime.strptime(start_date,'%Y-%m-%d %H:%M')
end_time = dt.datetime.strptime(stop_date,'%Y-%m-%d %H:%M')
iso_start = start_time.strftime('%Y-%m-%dT%H:%M:%SZ')
iso_end = end_time.strftime('%Y-%m-%dT%H:%M:%SZ')
In [9]:
collector = CoopsSos()
collector.start_time = start_time
collector.end_time = end_time
collector.variables = data_dict["currents"]["sos_name"]
print collector.server.identification.title
print collector.start_time, ":", collector.end_time
ofrs = collector.server.offerings
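ofrs holds the offerings advertised by the CO-OPS SOS; listing a few shows which station and network identifiers are available. This assumes the offering objects expose a name attribute, as OWSLib's SOS bindings do.

# Sketch (optional): peek at the first few offerings
print len(ofrs), "offerings found"
for o in ofrs[:5]:
    print o.name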
In [10]:
print "Date: ",iso_start," to ", iso_end
box_str=','.join(str(e) for e in bounding_box)
print "Lat/Lon Box: ",box_str
url = (('http://opendap.co-ops.nos.noaa.gov/ioos-dif-sos/SOS?'
'service=SOS&request=GetObservation&version=1.0.0&'
'observedProperty=%s&bin=1&'
'offering=urn:ioos:network:NOAA.NOS.CO-OPS:CurrentsActive&'
'featureOfInterest=BBOX:%s&responseFormat=text/csv') % (sos_name, box_str))
print url
obs_loc_df = pd.read_csv(url)
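pd.read_csv(url) fails with an opaque parser error if the SOS returns an XML exception report instead of CSV. Purely as a more defensive alternative (standard requests and StringIO only), assuming the same url as above:

# Sketch (optional): fetch the CSV explicitly so HTTP or OWS errors are visible before parsing
from StringIO import StringIO  # Python 2; use io.StringIO under Python 3
r = requests.get(url, timeout=60)
r.raise_for_status()
if 'ExceptionReport' in r.text:
    warn('SOS returned an exception report instead of CSV')
else:
    obs_loc_df = pd.read_csv(StringIO(r.text))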
In [11]:
# Keep only the first bin so each station appears once
obs_loc_df = obs_loc_df.loc[obs_loc_df['bin (count)'] == 1, :]
obs_loc_df.head()
In [12]:
# Pull the station IDs and positions out of the filtered frame
stations = [sta.split(':')[-1] for sta in obs_loc_df['station_id']]
obs_lon = [sta for sta in obs_loc_df['longitude (degree)']]
obs_lat = [sta for sta in obs_loc_df['latitude (degree)']]
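With the station positions in hand, a folium map gives a quick check that they fall inside the bounding box. Again a sketch against folium's current Marker API rather than anything from the original notebook; inline_map is assumed to embed the result.

# Sketch (optional): mark the CO-OPS current-meter stations on a map
m = folium.Map(location=[np.mean(obs_lat), np.mean(obs_lon)], zoom_start=7)
for name, lat, lon in zip(stations, obs_lat, obs_lon):
    folium.Marker([lat, lon], popup=name).add_to(m)
inline_map(m)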
In [13]:
obs_df = []
current_speed_df = []
sta_names = []
sta_failed = []
for sta in stations:
    try:
        df = coops2df(collector, sta, sos_name, iso_start, iso_end)
    except Exception as e:
        print "Error: " + str(e)
        continue
    name = df.name
    sta_names.append(name)
    # if df.empty:
    #     sta_failed.append(name)
    #     df = DataFrame(np.arange(len(ts)) * np.NaN, index=ts.index, columns=['Observed Data'])
    #     df.name = name
    obs_df.append(df)
    obs_df[-1].name = name
    # Create a separate dataframe for only sea water speed
    current_speed_df.append(pd.DataFrame(df['sea_water_speed (cm/s)']))
    current_speed_df[-1].name = name
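A quick tally catches a silently empty result set before the plotting cells below:

# Sketch (optional): report how many stations produced usable observations
print "%d of %d stations returned current observations" % (len(obs_df), len(stations))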
In [14]:
for df in obs_df:
    num_bins = df['number_of_bins'][0]
    depth = df['bin_distance (m)'].values[0:num_bins]
    time = df.loc[df['bin (count)'] == 1].index.values
    xdates = [dt.datetime.strptime(str(date).split('.')[0], '%Y-%m-%dT%H:%M:%S') for date in time]
    dates = mdates.date2num(xdates)

    # Extract data from each depth bin to create a 2D matrix of current speeds (depth x time)
    data = np.zeros((num_bins, len(df.index) / num_bins))
    for n in range(num_bins):
        data[n, :] = df.loc[df['bin (count)'] == (n + 1), 'sea_water_speed (cm/s)'].values

    fig, ax = plt.subplots(figsize=(12, 6))
    ax.xaxis.set_major_formatter(mdates.DateFormatter('%m/%d/%Y'))
    ax.xaxis.set_major_locator(mdates.DayLocator())
    im = ax.pcolor(dates, depth, data, vmin=abs(data).min(), vmax=abs(data).max())
    cb = fig.colorbar(im, ax=ax)
    ax.set_ylabel('bin_distance (m)')
    ax.set_title(df.name)
In [15]:
# Plot current speed and direction for the first bin of each station
for df in obs_df:
    fig, axes = plt.subplots(1, 1, figsize=(20, 6))

    # Only plot the first bin
    axes = df.loc[df['bin (count)'] == 1, 'sea_water_speed (cm/s)'].plot(title=df.name, legend=False, color='b')
    plt.setp(axes.lines[0], linewidth=1.0, zorder=1)
    axes.set_ylabel('Current Speed (cm/s)')
    for tl in axes.get_yticklabels():
        tl.set_color('b')
    axes.yaxis.label.set_color('blue')

    # Plot current direction on a second y-axis
    ax2 = axes.twinx()
    axes2 = df.loc[df['bin (count)'] == 1, 'direction_of_sea_water_velocity (degree)'].plot(title=df.name, legend=False, ax=ax2, color='g')
    plt.setp(axes2.lines[0], linewidth=1.0, zorder=1)
    axes2.set_ylabel('Current Direction (degrees)')
    for tl in ax2.get_yticklabels():
        tl.set_color('g')
    axes2.yaxis.label.set_color('green')