(a) Bounding box: everything inside the dashed lines from the map above.
In [1]:
# Regional Association bounding boxes as [lon_min, lat_min, lon_max, lat_max].
RAs = {
    'NERACOOS': [-72, 41, -69, 44],
    'NANOOS': [-127, 43, -123.75, 48],
    'SECOORA': [-87.40, 24.25, -74.70, 36.70],
}

# This run focuses on the SECOORA region.
bbox = RAs['SECOORA']
(b) Time span: the past 6 days
In [2]:
import pytz
from datetime import datetime, timedelta

# Time window: the six days ending at the current UTC hour.
# ``datetime.utcnow()`` is deprecated (it returns a naive datetime that then
# needs a tzinfo patched in); ``datetime.now(pytz.utc)`` yields the same
# aware instant directly.
stop = datetime.now(pytz.utc).replace(minute=0, second=0, microsecond=0)
start = stop - timedelta(days=6)
(c) Variable name
In [3]:
# SOS standard name of the water-level variable requested from CO-OPS below.
sos_name = 'water_surface_height_above_reference_datum'
In [4]:
from pyoos.collectors.coops.coops_sos import CoopsSos

# CO-OPS SOS collector, filtered down to our box, time window,
# vertical datum and variable of interest.
collector = CoopsSos()
collector.set_datum('NAVD')
collector.set_bbox(bbox)
collector.start_time = start
collector.end_time = stop
collector.variables = [sos_name]
In [5]:
server = collector.server
ofrs = server.offerings
title = server.identification.title

# Centered, asterisk-padded banner formatter (also reused by later cells).
fmt = '{:*^64}'.format

print(fmt(' Collector offerings '))
print('{}: {} offerings'.format(title, len(ofrs)))
In [6]:
from owslib.ows import ExceptionReport

# The full basin-wide/week-long request may be rejected by the server;
# print the ExceptionReport instead of letting it abort the notebook.
try:
    response = collector.raw()
except ExceptionReport as err:
    print(err)
Let's change the filter to fix that.
In [7]:
# Shrink the request to a single day and ask for CSV output.
collector.end_time = collector.start_time + timedelta(days=1)
response = collector.raw(responseFormat="text/csv")
Get data as Paegan CDM objects.
In [8]:
from lxml.etree import XMLSyntaxError

# ``collect`` may fail to parse the response; show the parser error
# rather than stopping the notebook here.
try:
    collector.collect()
except XMLSyntaxError as err:
    print(err)
Let's try the raw response then...
In [9]:
from io import BytesIO
from pandas import DataFrame, read_csv

# NOTE(review): the ``collector.raw`` result on the next line is discarded,
# so ``read_csv`` actually parses the ``response`` captured two cells up —
# and ``BytesIO`` needs bytes while ``raw`` appears to return text (the
# station loop below encodes it first). Should be
# ``response = collector.raw(...)`` + ``response.encode('utf-8')``.
collector.raw(responseFormat="text/csv")
df = read_csv(BytesIO(response),
              parse_dates=True)
df.columns
Out[9]:
In [10]:
from io import BytesIO
from pandas import DataFrame, read_csv
collector.raw(responseFormat="text/csv")
df = read_csv(BytesIO(response),
parse_dates=True)
df.columns
Out[10]:
... and clean the table a little bit.
In [11]:
grouped = df.groupby('station_id')

# Collapse the table to one record (the first row) per station.
df = DataFrame.from_dict(
    {station: group.iloc[0] for station, group in grouped}
).T

# Attach the human-readable description from the matching SOS offering.
df['name'] = [
    offering.description
    for sta in df.index
    for offering in collector.server.offerings
    if sta == offering.name
]
Collect each time-series.
In [12]:
observations = []
col = 'water_surface_height_above_reference_datum (m)'

print(fmt('Collecting'))
for _, row in df.iterrows():
    station_id = row['station_id'].split(':')[-1]
    print('{}: {}'.format(station_id, row['name']))

    # Fetch this one station's series as CSV text and parse it,
    # dropping any repeated timestamps.
    collector.features = [station_id]
    response = collector.raw(responseFormat="text/csv")
    kw = dict(parse_dates=True, index_col='date_time')
    data = read_csv(BytesIO(response.encode('utf-8')), **kw).reset_index()
    data = data.drop_duplicates(subset='date_time').set_index('date_time')

    series = data[col]
    # Stash the station metadata on the (private) ``_metadata`` attribute;
    # the marker-building code reads it back when drawing the map.
    series._metadata = [dict(name=row['name'],
                             station=row['station_id'],
                             sensor=row['sensor_id'],
                             lon=row['longitude (degree)'],
                             lat=row['latitude (degree)'])]
    observations.append(series)
In [13]:
import folium
import numpy as np

# Map centre: midpoint of the bounding box, flipped from
# (lon, lat) order to folium's (lat, lon).
corners = np.array(bbox).reshape(2, 2)
location = corners.mean(axis=0).tolist()[::-1]

tiles = ('http://services.arcgisonline.com/arcgis/rest/'
         'services/Ocean/World_Ocean_Base/MapServer/tile/{z}/{y}/{x}')

mapa = folium.Map(location=location, zoom_start=6,
                  tiles=tiles, attr='ESRI')
In [14]:
from bokeh.plotting import figure
from bokeh.resources import CDN
from bokeh.embed import file_html
from folium.element import IFrame


def make_marker(series):
    """Build a folium marker whose popup embeds a bokeh time-series plot.

    ``series`` must carry the dict stored in ``series._metadata[0]`` by the
    collection loop above (keys: name, station, sensor, lon, lat).
    """
    width, height = 500, 250
    metadata = series._metadata[0]

    # Render the series with bokeh and capture it as standalone HTML.
    plot = figure(x_axis_type="datetime",
                  title=metadata['name'],
                  width=width, height=height)
    plot.line(series.index, series, line_width=2)
    html = file_html(plot, CDN, metadata['station'].split(':')[-1])

    # Wrap the HTML in an iframe popup, padded so the plot fits.
    iframe = IFrame(html, width=width + 40, height=height + 80)
    popup = folium.Popup(iframe, max_width=2650)

    icon = folium.Icon(color='green', icon='stats')
    return folium.Marker(location=[metadata['lat'], metadata['lon']],
                         popup=popup,
                         icon=icon)


for series in observations:
    make_marker(series).add_to(mapa)
In [15]:
# Trace the query bounding box on the map as a closed red outline
# (folium wants (lat, lon) pairs).
lon_min, lat_min, lon_max, lat_max = bbox
box = [[lat_min, lon_min], [lat_min, lon_max],
       [lat_max, lon_max], [lat_max, lon_min],
       [lat_min, lon_min]]
folium.PolyLine(box, color='red').add_to(mapa)

mapa.fit_bounds(mapa.get_bounds())
mapa
Out[15]: