IOOS System Test: Extreme Events Theme: Inundation

Compare modeled water levels with observations for a specified bounding box and time period using IOOS recommended service standards for catalog search (CSW) and data retrieval (OPeNDAP & SOS).

  • Query CSW to find datasets that match criteria
  • Extract OPeNDAP data endpoints from model datasets and SOS endpoints from observational datasets
  • OPeNDAP model datasets will be granules
  • SOS endpoints may be datasets (from ncSOS) or collections of datasets (from NDBC, CO-OPS SOS servers)
  • Filter SOS services to obtain datasets
  • Extract data from SOS datasets
  • Extract data from model datasets at locations of observations
  • Compare time series data on same vertical datum

In [1]:
from pylab import *
from owslib.csw import CatalogueServiceWeb
from owslib import fes
import random
import netCDF4
import pandas as pd
import datetime as dt
from pyoos.collectors.coops.coops_sos import CoopsSos
import cStringIO
import iris
import urllib2
import parser
from lxml import etree
import cartopy.crs as ccrs
import cartopy.feature as cfeature
from cartopy.io.img_tiles import MapQuestOpenAerial, MapQuestOSM, OSM

Specify a time range and bounding box of interest:


In [2]:
# specific times (UTC) ...

# hurricane sandy
jd_start = dt.datetime(2012,10,26)
jd_stop = dt.datetime(2012,11,2)

# 2014 feb 10-15 storm
jd_start = dt.datetime(2014,2,10)
jd_stop = dt.datetime(2014,2,15)

# 2014 recent
jd_start = dt.datetime(2014,3,8)
jd_stop = dt.datetime(2014,3,11)

# 2013 april storm
#jd_start = dt.datetime(2013,4,20)
#jd_stop = dt.datetime(2013,4,24)

# ... or relative to now
jd_now = dt.datetime.utcnow()
jd_start = jd_now - dt.timedelta(days=3)
jd_stop = jd_now + dt.timedelta(days=3)

start_date = jd_start.strftime('%Y-%m-%d %H:00')
stop_date  = jd_stop.strftime('%Y-%m-%d %H:00')

jd_start = dt.datetime.strptime(start_date,'%Y-%m-%d %H:%M')
jd_stop = dt.datetime.strptime(stop_date,'%Y-%m-%d %H:%M')

print start_date,'to',stop_date


2014-04-20 08:00 to 2014-04-26 08:00

In [3]:
# Bounding Box [lon_min, lat_min, lon_max, lat_max]
box=[-75., 39., -71., 41.5]  # new york harbor region
#box=[-72.0, 41.0, -69.0, 43.0]   # gulf of maine
#box=[-160.0, 18.0, -154., 23.0] #hawaii

Now we need to specify all the names we know for water level. These names are used both in the CSW search and to locate the data variables in the datasets that are returned. This is ugly and fragile; hopefully there will be a better way in the future...


In [4]:
name_list=['water_surface_height_above_reference_datum',
    'sea_surface_height_above_geoid','sea_surface_elevation',
    'sea_surface_height_above_reference_ellipsoid','sea_surface_height_above_sea_level',
    'sea_surface_height','water level']

sos_name = 'water_surface_height_above_reference_datum'

Search CSW for datasets of interest


In [5]:
#from IPython.core.display import HTML
#HTML('<iframe src=http://www.ngdc.noaa.gov/geoportal/ width=950 height=400></iframe>')

In [6]:
# connect to CSW, explore its properties

endpoint = 'http://www.ngdc.noaa.gov/geoportal/csw' # NGDC Geoportal
#endpoint = 'http://geoport.whoi.edu/geoportal/csw'  # USGS WHSC Geoportal

#endpoint = 'http://www.nodc.noaa.gov/geoportal/csw'   # NODC Geoportal: granule level
#endpoint = 'http://data.nodc.noaa.gov/geoportal/csw'  # NODC Geoportal: collection level   
#endpoint = 'http://geodiscover.cgdi.ca/wes/serviceManagerCSW/csw'  # NRCAN CUSTOM
#endpoint = 'http://geoport.whoi.edu/gi-cat/services/cswiso' # USGS Woods Hole GI_CAT
#endpoint = 'http://cida.usgs.gov/gdp/geonetwork/srv/en/csw' # USGS CIDA Geonetwork
#endpoint = 'http://cmgds.marine.usgs.gov/geonetwork/srv/en/csw' # USGS Coastal and Marine Program
#endpoint = 'http://geoport.whoi.edu/geoportal/csw' # USGS Woods Hole Geoportal 
#endpoint = 'http://geo.gov.ckan.org/csw'  # CKAN testing site for new Data.gov
#endpoint = 'https://edg.epa.gov/metadata/csw'  # EPA
#endpoint = 'http://cwic.csiss.gmu.edu/cwicv1/discovery'  # CWIC

csw = CatalogueServiceWeb(endpoint,timeout=60)
csw.version


Out[6]:
'2.0.2'

In [7]:
for oper in csw.operations:
    if oper.name == 'GetRecords':
        print '\nISO Queryables:\n',oper.constraints['SupportedISOQueryables']['values']


ISO Queryables:
['apiso:Subject', 'apiso:Title', 'apiso:Abstract', 'apiso:AnyText', 'apiso:Format', 'apiso:Identifier', 'apiso:Modified', 'apiso:Type', 'apiso:BoundingBox', 'apiso:CRS.Authority', 'apiso:CRS.ID', 'apiso:CRS.Version', 'apiso:RevisionDate', 'apiso:AlternateTitle', 'apiso:CreationDate', 'apiso:PublicationDate', 'apiso:OrganizationName', 'apiso:HasSecurityConstraints', 'apiso:Language', 'apiso:ResourceIdentifier', 'apiso:ParentIdentifier', 'apiso:KeywordType', 'apiso:TopicCategory', 'apiso:ResourceLanguage', 'apiso:GeographicDescriptionCode', 'apiso:Denominator', 'apiso:DistanceValue', 'apiso:DistanceUOM', 'apiso:TempExtent_begin', 'apiso:TempExtent_end', 'apiso:ServiceType', 'apiso:ServiceTypeVersion', 'apiso:Operation', 'apiso:OperatesOn', 'apiso:OperatesOnIdentifier', 'apiso:OperatesOnName', 'apiso:CouplingType']

In [8]:
# hopefully something like this will be implemented in fes soon
def dateRange(start_date='1900-01-01',stop_date='2100-01-01',constraint='overlaps'):
    """
    return a pair of FES filters for a time range: 'overlaps' matches datasets
    whose time extent overlaps [start_date, stop_date]; 'within' matches datasets
    whose time extent lies entirely inside it
    """
    if constraint == 'overlaps':
        start = fes.PropertyIsLessThanOrEqualTo(propertyname='apiso:TempExtent_begin', literal=stop_date)
        stop = fes.PropertyIsGreaterThanOrEqualTo(propertyname='apiso:TempExtent_end', literal=start_date)
    elif constraint == 'within':
        start = fes.PropertyIsGreaterThanOrEqualTo(propertyname='apiso:TempExtent_begin', literal=start_date)
        stop = fes.PropertyIsLessThanOrEqualTo(propertyname='apiso:TempExtent_end', literal=stop_date)
    return start,stop
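For example, to require that a dataset's time extent lie entirely inside the window, rather than merely overlap it, one could call (a hypothetical alternative, not used below):

start, stop = dateRange(start_date, stop_date, constraint='within')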

In [9]:
stop_date


Out[9]:
'2014-04-26 08:00'

In [10]:
# convert User Input into FES filters
start,stop = dateRange(start_date,stop_date)
bbox = fes.BBox(box)

In [11]:
or_filt = fes.Or([fes.PropertyIsLike(propertyname='apiso:AnyText',literal=('*%s*' % val),
                    escapeChar='\\',wildCard='*',singleChar='?') for val in name_list])

ROMS model output often comes in Averages and History files. The Averages files are usually averaged over a tidal cycle or more, while the History files are snapshots at each time instant. We are not interested in averaged data for this test, so in the cell below we exclude any dataset whose metadata text contains the term "Averages". A better approach would be to look at the cell_methods attributes propagated through to some term in the ISO metadata, but as far as I know this is not implemented yet; a sketch of what it might look like follows.
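An (untested) sketch of that better approach, checking cell_methods directly on each dataset with netCDF4 (the helper name and the 'time: mean' test are assumptions, not part of this notebook):

def is_time_averaged(url, var_name):
    """True if var_name at this OPeNDAP url carries a time-mean cell_methods attribute"""
    nc = netCDF4.Dataset(url)
    try:
        cm = getattr(nc.variables[var_name], 'cell_methods', '')
        return 'time: mean' in cm
    finally:
        nc.close()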


In [12]:
val = 'Averages'
not_filt = fes.Not([fes.PropertyIsLike(propertyname='apiso:AnyText',literal=('*%s*' % val),
                        escapeChar='\\',wildCard='*',singleChar='?')])

In [13]:
filter_list = [fes.And([ bbox, start, stop, or_filt, not_filt]) ]

In [14]:
# try request using multiple filters "and" syntax: [[filter1,filter2]]
csw.getrecords2(constraints=filter_list,maxrecords=1000,esn='full')
print len(csw.records.keys())


16

Now print out some titles


In [15]:
for rec,item in csw.records.iteritems():
    print item.title


NYHOPS Forecast Model Results
ROMS ESPRESSO Real-Time Operational IS4DVAR Forecast System Version 2 (NEW) 2013-present FMRC History (Best)
NECOFS GOM3 (FVCOM) - Northeast US - Latest Forecast
NECOFS GOM3 Wave - Northeast US - Latest Forecast
DBOFS - Delaware Bay Operational Forecast System - NOAA CO-OPS - POM
NYOFS - New York and New Jersey Operational Forecast System - NOAA CO-OPS - POM
COAWST Forecast System : USGS : US East Coast and Gulf of Mexico (Experimental)
Barotropic Tide Model for the Pacific Basin
CBOFS - Chesapeake Bay Operational Forecast System - NOAA CO-OPS - POM
ESTOFS Storm Surge Model - Atlantic - v1.0.0 - NOAA - NCEP - ADCIRC
TEST: NOAA.NOS.CO-OPS SOS
NECOFS Massachusetts (FVCOM) - Massachusetts Coastal - Latest Forecast
HYbrid Coordinate Ocean Model (HYCOM): Global
HRECOS Aggregated Station HRWSTPTH Data
HRECOS Aggregated Station HRPIER84 Data
HRECOS Aggregated Station HRPMNTH Data

Define a function that will return the endpoint for a specified service type


In [16]:
def service_urls(records,service_string='urn:x-esri:specification:ServiceType:odp:url'):
    """
    extract service_urls of a specific type (DAP, SOS) from records
    """
    urls=[]
    for key,rec in records.iteritems():
        #create a generator object and iterate through it until a match is found;
        #if no match is found, return the default value (here None)
        url = next((d['url'] for d in rec.references if d['scheme'] == service_string), None)
        if url is not None:
            urls.append(url)
    return urls

Print out all the OPeNDAP Data URL endpoints


In [17]:
dap_urls = service_urls(csw.records,service_string='urn:x-esri:specification:ServiceType:odp:url')
print "\n".join(dap_urls)


http://colossus.dl.stevens-tech.edu/thredds/dodsC/latest/Complete_gcmplt.nc
http://tds.marine.rutgers.edu/thredds/dodsC/roms/espresso/2013_da/his_Best/ESPRESSO_Real-Time_v2_History_Best_Available_best.ncd
http://www.smast.umassd.edu:8080/thredds/dodsC/FVCOM/NECOFS/Forecasts/NECOFS_GOM3_FORECAST.nc
http://www.smast.umassd.edu:8080/thredds/dodsC/FVCOM/NECOFS/Forecasts/NECOFS_WAVE_FORECAST.nc
http://opendap.co-ops.nos.noaa.gov/thredds/dodsC/DBOFS/fmrc/Aggregated_7_day_DBOFS_Fields_Forecast_best.ncd
http://opendap.co-ops.nos.noaa.gov/thredds/dodsC/NYOFS/fmrc/Aggregated_7_day_NYOFS_Fields_Forecast_best.ncd
http://geoport.whoi.edu/thredds/dodsC/coawst_4/use/fmrc/coawst_4_use_best.ncd
http://oos.soest.hawaii.edu/thredds/dodsC/hioos/tide_pac
http://opendap.co-ops.nos.noaa.gov/thredds/dodsC/CBOFS/fmrc/Aggregated_7_day_CBOFS_Fields_Forecast_best.ncd
http://geoport-dev.whoi.edu/thredds/dodsC/estofs/atlantic
http://www.smast.umassd.edu:8080/thredds/dodsC/FVCOM/NECOFS/Forecasts/NECOFS_FVCOM_OCEAN_MASSBAY_FORECAST.nc
http://oos.soest.hawaii.edu/thredds/dodsC/pacioos/hycom/global
http://sos.maracoos.org/stable/dodsC/stationHRWSTPTH-agg.ncml
http://sos.maracoos.org/stable/dodsC/stationHRPIER84-agg.ncml
http://sos.maracoos.org/stable/dodsC/stationHRPMNTH-agg.ncml

Print out all the SOS Data URL endpoints


In [18]:
sos_urls = service_urls(csw.records,service_string='urn:x-esri:specification:ServiceType:sos:url')
print "\n".join(sos_urls)


http://opendap.co-ops.nos.noaa.gov/ioos-dif-sos/SOS?service=SOS&request=GetCapabilities

In [19]:
def nearxy(x,y,xi,yi):
    """
    find the indices x[i] of arrays (x,y) closest to the points (xi,yi)
    """
    ind=ones(len(xi),dtype=int)
    dd=ones(len(xi),dtype='float')
    for i in arange(len(xi)):
        dist=sqrt((x-xi[i])**2+(y-yi[i])**2)
        ind[i]=dist.argmin()
        dd[i]=dist[ind[i]]
    return ind,dd
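For many query points the same search can be vectorized with broadcasting; a sketch (equivalent to the loop above, assuming the full distance matrix fits in memory; nearxy_vec is not used below):

def nearxy_vec(x,y,xi,yi):
    """vectorized version of nearxy: indices into (x,y) and distances for each (xi,yi)"""
    dist = sqrt((asarray(x)[:,None]-asarray(xi))**2 + (asarray(y)[:,None]-asarray(yi))**2)
    ind = dist.argmin(axis=0)
    return ind, dist[ind,arange(len(xi))]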

In [20]:
def find_ij(x,y,d,xi,yi):
    """
    find the non-NaN cells d[j,i] that are closest to the points (xi,yi)
    """
    index = where(~isnan(d.flatten()))[0]
    ind,dd = nearxy(x.flatten()[index],y.flatten()[index],xi,yi)
    i,j = ind2ij(x,index[ind])   # ind2ij returns (column, row)
    return j,i,dd

In [21]:
def find_timevar(cube):
    """
    return the time coordinate from an Iris cube. This is a workaround for
    Iris having problems with FMRC aggregations, which produce two time coordinates
    """
    try:
        cube.coord(axis='T').rename('time')
    except Exception:
        pass
    timevar = cube.coord('time')
    return timevar

In [22]:
def ind2ij(a,index):
    """
    returns indices (i,j) such that a[j,i] == a.ravel()[index]
    """
    n,m = shape(a)
    j = (index//m).astype(int)
    i = remainder(index,m)
    return i,j
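A quick (hypothetical) sanity check of ind2ij on a small array:

a2 = arange(12).reshape(3,4)
i,j = ind2ij(a2, array([7]))
print a2[j[0],i[0]], a2.ravel()[7]   # both print 7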

1. Get observations from SOS

Here we are using a custom class from pyoos to read the CO-OPS SOS. This is definitely unsavory, as the whole point of using a standard is to avoid the need for custom classes for each service. We need to examine the consequences of removing this and just going with a straight SOS request via OWSLib; a sketch of that follows.
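An untested sketch of the plain-OWSLib equivalent (the kwargs mirror the GetObservation request built by hand further below; this is an assumption about the server, not something run in this notebook):

from owslib.sos import SensorObservationService
sos = SensorObservationService('http://opendap.co-ops.nos.noaa.gov/ioos-dif-sos/SOS')
response = sos.get_observation(
    offerings=['urn:ioos:network:NOAA.NOS.CO-OPS:WaterLevelActive'],
    observedProperties=['water_surface_height_above_reference_datum'],
    responseFormat='text/csv',
    eventTime=jd_start.strftime('%Y-%m-%dT%H:%M:%SZ'))
obs = pd.read_csv(cStringIO.StringIO(str(response)))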


In [23]:
collector = CoopsSos()
collector.set_datum('NAVD')
collector.set_datum('MSL')   # the second call overrides the first, so MSL is used

In [24]:
collector.server.identification.title


Out[24]:
'NOAA.NOS.CO-OPS SOS'

In [25]:
collector.start_time = jd_start
collector.end_time = jd_stop
collector.variables = [sos_name]

In [26]:
ofrs = collector.server.offerings

In [27]:
print len(ofrs)
for p in ofrs[700:710]: print p


1025
Offering id: station-8411060, name: urn:ioos:station:NOAA.NOS.CO-OPS:8411060
Offering id: station-8413320, name: urn:ioos:station:NOAA.NOS.CO-OPS:8413320
Offering id: station-8418150, name: urn:ioos:station:NOAA.NOS.CO-OPS:8418150
Offering id: station-8419317, name: urn:ioos:station:NOAA.NOS.CO-OPS:8419317
Offering id: station-8423898, name: urn:ioos:station:NOAA.NOS.CO-OPS:8423898
Offering id: station-8443970, name: urn:ioos:station:NOAA.NOS.CO-OPS:8443970
Offering id: station-8447386, name: urn:ioos:station:NOAA.NOS.CO-OPS:8447386
Offering id: station-8447387, name: urn:ioos:station:NOAA.NOS.CO-OPS:8447387
Offering id: station-8447435, name: urn:ioos:station:NOAA.NOS.CO-OPS:8447435
Offering id: station-8447930, name: urn:ioos:station:NOAA.NOS.CO-OPS:8447930

Find the SOS stations within our bounding box and time extent

We would like to just apply a filter to a collection to get a new collection, but pyoos doesn't do that yet. So instead we issue a GetObservation request with a bounding box filter, asking for a single value at the start of the time period of interest. The SOS server returns one point for each station found inside the box; so for 3 stations, we get back 3 records, in CSV format. We then strip the station ids from the CSV, which gives us a list of stations we can use with pyoos. The template for this bounding-box-filtered GetObservation query was generated using the GUI at http://opendap.co-ops.nos.noaa.gov/ioos-dif-sos/


In [28]:
iso_start = jd_start.strftime('%Y-%m-%dT%H:%M:%SZ')
print iso_start
box_str=','.join(str(e) for e in box)
print box_str


2014-04-20T08:00:00Z
-75.0,39.0,-71.0,41.5

In [29]:
url=('http://opendap.co-ops.nos.noaa.gov/ioos-dif-sos/SOS?'
     'service=SOS&request=GetObservation&version=1.0.0&'
     'observedProperty=%s&offering=urn:ioos:network:NOAA.NOS.CO-OPS:WaterLevelActive&'
     'featureOfInterest=BBOX:%s&responseFormat=text/csv&eventTime=%s') % (sos_name,box_str,iso_start)
print url
obs_loc_df = pd.read_csv(url)


http://opendap.co-ops.nos.noaa.gov/ioos-dif-sos/SOS?service=SOS&request=GetObservation&version=1.0.0&observedProperty=water_surface_height_above_reference_datum&offering=urn:ioos:network:NOAA.NOS.CO-OPS:WaterLevelActive&featureOfInterest=BBOX:-75.0,39.0,-71.0,41.5&responseFormat=text/csv&eventTime=2014-04-20T08:00:00Z

In [30]:
obs_loc_df.head()


Out[30]:
station_id sensor_id latitude (degree) longitude (degree) date_time water_surface_height_above_reference_datum (m) datum_id vertical_position (m)
0 urn:ioos:station:NOAA.NOS.CO-OPS:8461490 urn:ioos:sensor:NOAA.NOS.CO-OPS:8461490:A1 41.3614 -72.0900 2014-04-20T08:00:00Z 0.598 urn:ioos:def:datum:noaa::MLLW 1.074
1 urn:ioos:station:NOAA.NOS.CO-OPS:8465705 urn:ioos:sensor:NOAA.NOS.CO-OPS:8465705:A1 41.2833 -72.9083 2014-04-20T08:00:00Z 1.993 urn:ioos:def:datum:noaa::MLLW 5.618
2 urn:ioos:station:NOAA.NOS.CO-OPS:8467150 urn:ioos:sensor:NOAA.NOS.CO-OPS:8467150:A1 41.1733 -73.1817 2014-04-20T08:00:00Z 2.212 urn:ioos:def:datum:noaa::MLLW 0.604
3 urn:ioos:station:NOAA.NOS.CO-OPS:8510560 urn:ioos:sensor:NOAA.NOS.CO-OPS:8510560:A1 41.0483 -71.9600 2014-04-20T08:00:00Z 0.449 urn:ioos:def:datum:noaa::MLLW 1.177
4 urn:ioos:station:NOAA.NOS.CO-OPS:8516945 urn:ioos:sensor:NOAA.NOS.CO-OPS:8516945:A1 40.8103 -73.7649 2014-04-20T08:00:00Z 2.493 urn:ioos:def:datum:noaa::MLLW 3.928

5 rows × 8 columns


In [31]:
stations = [sta.split(':')[-1] for sta in obs_loc_df['station_id']]
print stations
obs_lon = list(obs_loc_df['longitude (degree)'])
obs_lat = list(obs_loc_df['latitude (degree)'])


['8461490', '8465705', '8467150', '8510560', '8516945', '8518750', '8519483', '8531680', '8534720', '8539094', '8548989']

Get longName from SOS DescribeSensor (station) request


In [32]:
def get_Coops_longName(sta):
    """
    get longName for specific station from COOPS SOS using DescribeSensor request
    """
    url=('http://opendap.co-ops.nos.noaa.gov/ioos-dif-sos/SOS?service=SOS&'
        'request=DescribeSensor&version=1.0.0&outputFormat=text/xml;subtype="sensorML/1.0.1"&'
        'procedure=urn:ioos:station:NOAA.NOS.CO-OPS:%s') % sta
    tree = etree.parse(urllib2.urlopen(url))
    root = tree.getroot()
    longName = root.xpath("//sml:identifier[@name='longName']/sml:Term/sml:value/text()",
                          namespaces={'sml':"http://www.opengis.net/sensorML/1.0.1"})
    return longName

Request CSV response from SOS and convert to Pandas DataFrames


In [33]:
def coops2df(collector,coops_id,sos_name):
    """
    request CSV response from the CO-OPS SOS for one station and return a DataFrame
    """
    collector.features = [coops_id]
    collector.variables = [sos_name]
    response = collector.raw(responseFormat="text/csv")
    data_df = pd.read_csv(cStringIO.StringIO(str(response)), parse_dates=True, index_col='date_time')
#    data_df['Observed Data']=data_df['water_surface_height_above_reference_datum (m)']-data_df['vertical_position (m)']
    data_df['Observed Data']=data_df['water_surface_height_above_reference_datum (m)']

    a = get_Coops_longName(coops_id)
    if len(a)==0:
        long_name=coops_id
    else:
        long_name=a[0]
        
    data_df.name=long_name
    return data_df

Generate a uniform 6-min time base for model/data comparison:


In [34]:
ts_rng = pd.date_range(start=jd_start, end=jd_stop, freq='6Min')
ts = pd.DataFrame(index=ts_rng)
print jd_start,jd_stop
print len(ts)


2014-04-20 08:00:00 2014-04-26 08:00:00
1441

Create a list of obs dataframes, one for each station:


In [35]:
obs_df=[]
sta_names=[]
for sta in stations:
    b=coops2df(collector,sta,sos_name)
    sta_names.append(b.name)
    print b.name
    # limit interpolation to 10 points (10 @ 6min = 1 hour)
    obs_df.append(pd.DataFrame(pd.concat([b, ts],axis=1).interpolate(limit=10)['Observed Data']))
    obs_df[-1].name=b.name


New London, CT
New Haven, CT
Bridgeport, CT
Montauk, NY
Kings Point, NY
The Battery, NY
Bergen Point West Reach, NY
Sandy Hook, NJ
Atlantic City, NJ
Burlington, Delaware River, NJ
Newbold, PA

In [36]:
from matplotlib.transforms import offset_copy
geodetic = ccrs.Geodetic(globe=ccrs.Globe(datum='WGS84'))
figure(figsize=(10,10))
# Open Source Imagery from MapQuest (max zoom = 16?)
tiler = MapQuestOpenAerial()
# Open Street Map (max zoom = 18?)
#tiler = OSM()
ax = plt.axes(projection=tiler.crs)
extent=[box[0],box[2],box[1],box[3]]
ax.set_extent(extent, geodetic)
ax.add_image(tiler, 7)
plt.scatter(obs_lon,obs_lat,marker='o',s=30.0,
         color='cyan',transform=ccrs.PlateCarree())
geodetic_transform = ccrs.Geodetic()._as_mpl_transform(ax)
text_transform = offset_copy(geodetic_transform, units='dots', x=-7,y=+7)

for x,y,label in zip(obs_lon,obs_lat,sta_names):
    plt.text(x,y,label,horizontalalignment='left',transform=text_transform,color='white')
gl=ax.gridlines(draw_labels=True)
gl.xlabels_top = False
gl.ylabels_right = False
title('Water Level Gauge Locations')


Out[36]:
<matplotlib.text.Text at 0x569e410>

2. Get model output from OPeNDAP URLs

Try to open all the OPeNDAP URLs using Iris from the UK Met Office. If the resulting cube is 1D, assume the dataset is point data; if 2D, an unstructured grid model; and if 3D, a structured grid model.

Construct an Iris constraint to load only cubes whose standard name matches the name_list:


In [37]:
print name_list
def name_in_list(cube):
    return cube.standard_name in name_list
constraint = iris.Constraint(cube_func=name_in_list)


['water_surface_height_above_reference_datum', 'sea_surface_height_above_geoid', 'sea_surface_elevation', 'sea_surface_height_above_reference_ellipsoid', 'sea_surface_height_above_sea_level', 'sea_surface_height', 'water level']

In [38]:
def mod_df(arr,timevar,istart,istop,mod_name,ts):
    """
    return time series (DataFrame) from model interpolated onto uniform time base
    """
    t=timevar.points[istart:istop]
    jd = timevar.units.num2date(t)

    # eliminate any data points closer together than 10 seconds;
    # this was required to handle issues with CO-OPS aggregations, I think because
    # they use floating point time in hours, which is not very accurate, so the FMRC
    # aggregation is aggregating points that actually occur at the same time
    tstep = diff(jd)   # (renamed from "dt" to avoid shadowing the datetime alias)
    s = array([ele.seconds for ele in tstep])
    ind = where(s>10)[0]
    arr = arr[ind+1]
    jd = jd[ind+1]
    
    b = pd.DataFrame(arr,index=jd,columns=[mod_name])
    # eliminate any data with NaN
    b = b[isfinite(b[mod_name])]
    # interpolate onto uniform time base, fill gaps up to: (10 values @ 6 min = 1 hour)
    c = pd.concat([b, ts],axis=1).interpolate(limit=10)
    return c

In [39]:
print dap_urls


['http://colossus.dl.stevens-tech.edu/thredds/dodsC/latest/Complete_gcmplt.nc', 'http://tds.marine.rutgers.edu/thredds/dodsC/roms/espresso/2013_da/his_Best/ESPRESSO_Real-Time_v2_History_Best_Available_best.ncd', 'http://www.smast.umassd.edu:8080/thredds/dodsC/FVCOM/NECOFS/Forecasts/NECOFS_GOM3_FORECAST.nc', 'http://www.smast.umassd.edu:8080/thredds/dodsC/FVCOM/NECOFS/Forecasts/NECOFS_WAVE_FORECAST.nc', 'http://opendap.co-ops.nos.noaa.gov/thredds/dodsC/DBOFS/fmrc/Aggregated_7_day_DBOFS_Fields_Forecast_best.ncd', 'http://opendap.co-ops.nos.noaa.gov/thredds/dodsC/NYOFS/fmrc/Aggregated_7_day_NYOFS_Fields_Forecast_best.ncd', 'http://geoport.whoi.edu/thredds/dodsC/coawst_4/use/fmrc/coawst_4_use_best.ncd', 'http://oos.soest.hawaii.edu/thredds/dodsC/hioos/tide_pac', 'http://opendap.co-ops.nos.noaa.gov/thredds/dodsC/CBOFS/fmrc/Aggregated_7_day_CBOFS_Fields_Forecast_best.ncd', 'http://geoport-dev.whoi.edu/thredds/dodsC/estofs/atlantic', 'http://www.smast.umassd.edu:8080/thredds/dodsC/FVCOM/NECOFS/Forecasts/NECOFS_FVCOM_OCEAN_MASSBAY_FORECAST.nc', 'http://oos.soest.hawaii.edu/thredds/dodsC/pacioos/hycom/global', 'http://sos.maracoos.org/stable/dodsC/stationHRWSTPTH-agg.ncml', 'http://sos.maracoos.org/stable/dodsC/stationHRPIER84-agg.ncml', 'http://sos.maracoos.org/stable/dodsC/stationHRPMNTH-agg.ncml']

In [40]:
a = iris.load_cube(dap_urls[0],constraint)
# convert to units of meters
#       a.convert_units('m')     # this isn't working for unstructured data
# take first 20 chars for model name
mod_name = a.attributes['title'][0:20]
r = shape(a)
timevar = find_timevar(a)
lat = a.coord(axis='Y').points
lon = a.coord(axis='X').points
jd = timevar.units.num2date(timevar.points)
istart = timevar.nearest_neighbour_index(timevar.units.date2num(jd_start))
istop = timevar.nearest_neighbour_index(timevar.units.date2num(jd_stop))


---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
<ipython-input-40-6a84239bac34> in <module>()
----> 1 a = iris.load_cube(dap_urls[0],constraint)
      2 # convert to units of meters
      3 #       a.convert_units('m')     # this isn't working for unstructured data
      4 # take first 20 chars for model name
      5 mod_name = a.attributes['title'][0:20]

/home/local/python27_epd/lib/python2.7/site-packages/iris/__init__.pyc in load_cube(uris, constraint, callback)
    309         raise ValueError('only a single constraint is allowed')
    310 
--> 311     cubes = _load_collection(uris, constraints, callback).merged().cubes()
    312 
    313     try:

/home/local/python27_epd/lib/python2.7/site-packages/iris/__init__.pyc in _load_collection(uris, constraints, callback)
    249     try:
    250         cubes = _generate_cubes(uris, callback)
--> 251         result = iris.cube._CubeFilterCollection.from_cubes(cubes, constraints)
    252     except EOFError as e:
    253         raise iris.exceptions.TranslationError(

/home/local/python27_epd/lib/python2.7/site-packages/iris/cube.pyc in from_cubes(cubes, constraints)
    134         pairs = [_CubeFilter(constraint) for constraint in constraints]
    135         collection = _CubeFilterCollection(pairs)
--> 136         for cube in cubes:
    137             collection.add_cube(cube)
    138         return collection

/home/local/python27_epd/lib/python2.7/site-packages/iris/__init__.pyc in _generate_cubes(uris, callback)
    240         elif scheme in ['http', 'https']:
    241             urls = [':'.join(x) for x in groups]
--> 242             for cube in iris.io.load_http(urls, callback):
    243                 yield cube
    244         else:

/home/local/python27_epd/lib/python2.7/site-packages/iris/io/__init__.pyc in load_http(urls, callback)
    211     # Call each iris format handler with the appropriate filenames
    212     for handling_format_spec, fnames in handler_map.iteritems():
--> 213         for cube in handling_format_spec.handler(fnames, callback):
    214             yield cube
    215 

/home/local/python27_epd/lib/python2.7/site-packages/iris/fileformats/netcdf.pyc in load_cubes(filenames, callback)
    444 #            if not cf_var.has_formula_terms():
    445             if True:
--> 446                 cube = _load_cube(engine, cf, cf_var, filename)
    447 
    448                 # Process any associated formula terms and attach

/home/local/python27_epd/lib/python2.7/site-packages/iris/fileformats/netcdf.pyc in _load_cube(engine, cf, cf_var, filename)
    361 
    362     # Run pyke inference engine with forward chaining rules.
--> 363     engine.activate(_PYKE_RULE_BASE)
    364 
    365     # Populate coordinate attributes with the untouched attributes from the

/home/local/python27_epd/lib/python2.7/site-packages/pyke/knowledge_engine.pyc in activate(self, *rb_names)
    294         add your facts before doing this!
    295         '''
--> 296         for rb_name in rb_names: self.get_rb(rb_name).activate()
    297 
    298     def lookup(self, kb_name, entity_name, pat_context, patterns):

/home/local/python27_epd/lib/python2.7/site-packages/pyke/rule_base.pyc in activate(self)
    157         self.engine.knowledge_bases[self.root_name] = self
    158         self.register_fc_rules(current_rb)
--> 159         self.run_fc_rules(current_rb)
    160 
    161     def reset(self):

/home/local/python27_epd/lib/python2.7/site-packages/pyke/rule_base.pyc in run_fc_rules(self, stop_at_rb)
    145         rb = self
    146         while rb is not stop_at_rb:
--> 147             for fc_rule in rb.fc_rules: fc_rule.run()
    148             if not rb.parent: break
    149             rb = rb.parent

/home/local/python27_epd/lib/python2.7/site-packages/pyke/fc_rule.pyc in run(self)
     88     def run(self):
     89         self.ran = True
---> 90         self.rule_fn(self)
     91 
     92     def new_fact(self, fact_args, n):

/home/local/python27_epd/lib/python2.7/site-packages/iris/fileformats/_pyke_rules/compiled_krb/fc_rules_cf_fc.pyc in fc_build_auxiliary_coordinate_latitude(rule, context, index)
    257             cf_coord_var = engine.cf_var.cf_group.auxiliary_coordinates[context.lookup_data('coordinate')]
    258             build_auxiliary_coordinate(engine, cf_coord_var,
--> 259             coord_name=CF_VALUE_STD_NAME_LAT)
    260             engine.rule_triggered.add(rule.name)
    261             rule.rule_base.num_fc_rules_triggered += 1

/home/local/python27_epd/lib/python2.7/site-packages/iris/fileformats/_pyke_rules/compiled_krb/fc_rules_cf_fc.pyc in build_auxiliary_coordinate(engine, cf_coord_var, coord_name, coord_system)
   1325                                  bounds=bounds_data,
   1326                                  attributes=attributes,
-> 1327                                  coord_system=coord_system)
   1328     cube.add_aux_coord(coord, data_dims)
   1329     engine.provides['coordinates'].append((coord, cf_coord_var.cf_name))

/home/local/python27_epd/lib/python2.7/site-packages/iris/coords.pyc in __init__(self, points, standard_name, long_name, var_name, units, bounds, attributes, coord_system)
    401 
    402         self.points = points
--> 403         self.bounds = bounds
    404 
    405     def __getitem__(self, key):

/home/local/python27_epd/lib/python2.7/site-packages/iris/coords.pyc in bounds(self, bounds)
   1505             # NB. Use _points to avoid triggering any lazy array.
   1506             if self._points.shape != bounds.shape[:-1]:
-> 1507                 raise ValueError("Bounds shape must be compatible with points "
   1508                                  "shape.")
   1509         self._bounds = bounds

ValueError: Bounds shape must be compatible with points shape.

In [39]:
# use only data within 0.04 degrees (about 4 km)
max_dist=0.04 

# use only data where the standard deviation of the time series exceeds 0.01 m (1 cm)
# this eliminates flat line model time series that come from land points that 
# should have had missing values.
min_var=0.01

for url in dap_urls:
    try:
        a = iris.load_cube(url,constraint)
        # convert to units of meters
 #       a.convert_units('m')     # this isn't working for unstructured data
        # take first 20 chars for model name
        mod_name = a.attributes['title'][0:20]
        r = shape(a)
        timevar = find_timevar(a)
        lat = a.coord(axis='Y').points
        lon = a.coord(axis='X').points
        jd = timevar.units.num2date(timevar.points)
        istart = timevar.nearest_neighbour_index(timevar.units.date2num(jd_start))
        istop = timevar.nearest_neighbour_index(timevar.units.date2num(jd_stop))
        
        # only proceed if we have data in the range requested
        if istart != istop:            
            nsta = len(obs_lon)
            if len(r)==3:
                print '[Structured grid model]:', url
                d = a[0,:,:].data
                # find the closest non-land point from a structured grid model
                if len(shape(lon))==1:
                    lon,lat= meshgrid(lon,lat)
                j,i,dd = find_ij(lon,lat,d,obs_lon,obs_lat)
                for n in range(nsta):
                    # only use if model cell is within max_dist (0.04 degrees) of requested location
                    if dd[n] <= max_dist:
                        arr = a[istart:istop,j[n],i[n]].data                    
                        if arr.std() >= min_var:
                            c = mod_df(arr,timevar,istart,istop,mod_name,ts)
                            name= obs_df[n].name
                            obs_df[n]=pd.concat([obs_df[n],c],axis=1)
                            obs_df[n].name = name
            elif len(r)==2:
                print '[Unstructured grid model]:', url
                # find the closest point from an unstructured grid model
                index,dd = nearxy(lon.flatten(),lat.flatten(),obs_lon,obs_lat)
                for n in range(nsta):
                    # only use if model cell is within max_dist (0.04 degrees) of requested location
                    if dd[n] <= max_dist:
                        arr = a[istart:istop,index[n]].data
                        if arr.std() >= min_var:
                            c = mod_df(arr,timevar,istart,istop,mod_name,ts)
                            name = obs_df[n].name
                            obs_df[n]=pd.concat([obs_df[n],c],axis=1)
                            obs_df[n].name = name 
            elif len(r)==1:
                print '[Data]:', url
                        
    except:
        # skip datasets that fail to load or that lack the requested variable
        pass


[Structured grid model]: http://tds.marine.rutgers.edu/thredds/dodsC/roms/espresso/2013_da/his_Best/ESPRESSO_Real-Time_v2_History_Best_Available_best.ncd
[Unstructured grid model]: http://www.smast.umassd.edu:8080/thredds/dodsC/FVCOM/NECOFS/Forecasts/NECOFS_GOM3_FORECAST.nc
[Structured grid model]:
/home/local/python27_epd/lib/python2.7/site-packages/iris/fileformats/_pyke_rules/compiled_krb/fc_rules_cf_fc.py:1216: UserWarning: Gracefully filling 'time' dimension coordinate masked points
  warnings.warn(msg.format(str(cf_coord_var.cf_name)))
/home/local/python27_epd/lib/python2.7/site-packages/iris/fileformats/_pyke_rules/compiled_krb/fc_rules_cf_fc.py:1147: UserWarning: Ignoring netCDF variable 'mask' invalid units 'nondimensional'
  warnings.warn(msg.format(msg_name, msg_units))
 http://opendap.co-ops.nos.noaa.gov/thredds/dodsC/DBOFS/fmrc/Aggregated_7_day_DBOFS_Fields_Forecast_best.ncd
[Structured grid model]: http://opendap.co-ops.nos.noaa.gov/thredds/dodsC/NYOFS/fmrc/Aggregated_7_day_NYOFS_Fields_Forecast_best.ncd
[Structured grid model]: http://geoport.whoi.edu/thredds/dodsC/coawst_4/use/fmrc/coawst_4_use_best.ncd
[Structured grid model]: http://oos.soest.hawaii.edu/thredds/dodsC/hioos/tide_pac
[Structured grid model]:
/home/local/python27_epd/lib/python2.7/site-packages/iris/fileformats/_pyke_rules/compiled_krb/fc_rules_cf_fc.py:1147: UserWarning: Ignoring netCDF variable 'sigma' invalid units 'sigma_level'
  warnings.warn(msg.format(msg_name, msg_units))
/home/local/python27_epd/lib/python2.7/site-packages/iris/fileformats/_pyke_rules/compiled_krb/fc_rules_cf_fc.py:1147: UserWarning: Ignoring netCDF variable 'nbdv' invalid units 'nondimensional'
  warnings.warn(msg.format(msg_name, msg_units))
 http://opendap.co-ops.nos.noaa.gov/thredds/dodsC/CBOFS/fmrc/Aggregated_7_day_CBOFS_Fields_Forecast_best.ncd
[Unstructured grid model]: http://geoport-dev.whoi.edu/thredds/dodsC/estofs/atlantic
[Unstructured grid model]: http://www.smast.umassd.edu:8080/thredds/dodsC/FVCOM/NECOFS/Forecasts/NECOFS_FVCOM_OCEAN_MASSBAY_FORECAST.nc
[Structured grid model]:
/home/local/python27_epd/lib/python2.7/site-packages/iris/fileformats/_pyke_rules/compiled_krb/fc_rules_cf_fc.py:1147: UserWarning: Ignoring netCDF variable 'neta' invalid units 'nondimensional'
  warnings.warn(msg.format(msg_name, msg_units))
/home/local/python27_epd/lib/python2.7/site-packages/iris/fileformats/_pyke_rules/compiled_krb/fc_rules_cf_fc.py:1147: UserWarning: Ignoring netCDF variable 'ibtypee' invalid units 'nondimensional'
  warnings.warn(msg.format(msg_name, msg_units))
/home/local/python27_epd/lib/python2.7/site-packages/iris/fileformats/_pyke_rules/compiled_krb/fc_rules_cf_fc.py:1147: UserWarning: Ignoring netCDF variable 'nvell' invalid units 'nondimensional'
  warnings.warn(msg.format(msg_name, msg_units))
/home/local/python27_epd/lib/python2.7/site-packages/iris/fileformats/_pyke_rules/compiled_krb/fc_rules_cf_fc.py:1147: UserWarning: Ignoring netCDF variable 'ibtype' invalid units 'nondimensional'
  warnings.warn(msg.format(msg_name, msg_units))
/home/local/python27_epd/lib/python2.7/site-packages/iris/fileformats/_pyke_rules/compiled_krb/fc_rules_cf_fc.py:1147: UserWarning: Ignoring netCDF variable 'nbvv' invalid units 'nondimensional'
  warnings.warn(msg.format(msg_name, msg_units))
/home/local/python27_epd/lib/python2.7/site-packages/iris/fileformats/_pyke_rules/compiled_krb/fc_rules_cf_fc.py:1147: UserWarning: Ignoring netCDF variable 'nvel' invalid units 'nondimensional'
  warnings.warn(msg.format(msg_name, msg_units))
/home/local/python27_epd/lib/python2.7/site-packages/iris/fileformats/_pyke_rules/compiled_krb/fc_rules_cf_fc.py:1147: UserWarning: Ignoring netCDF variable 'nvdll' invalid units 'nondimensional'
  warnings.warn(msg.format(msg_name, msg_units))
/home/local/python27_epd/lib/python2.7/site-packages/iris/fileformats/_pyke_rules/compiled_krb/fc_rules_cf_fc.py:1147: UserWarning: Ignoring netCDF variable 'sea_water_turbidity' invalid units 'FNU'
  warnings.warn(msg.format(msg_name, msg_units))
 http://oos.soest.hawaii.edu/thredds/dodsC/pacioos/hycom/global
[Data]:
/home/local/python27_epd/lib/python2.7/site-packages/iris/fileformats/_pyke_rules/compiled_krb/fc_rules_cf_fc.py:1147: UserWarning: Ignoring netCDF variable 'fractional_saturation_of_oxygen_in_sea_water' invalid units 'pct'
  warnings.warn(msg.format(msg_name, msg_units))
/home/local/python27_epd/lib/python2.7/site-packages/iris/fileformats/_pyke_rules/compiled_krb/fc_rules_cf_fc.py:1147: UserWarning: Ignoring netCDF variable 'relative_humidity' invalid units 'pct'
  warnings.warn(msg.format(msg_name, msg_units))
/home/local/python27_epd/lib/python2.7/site-packages/iris/fileformats/_pyke_rules/compiled_krb/fc_rules_cf_fc.py:1147: UserWarning: Ignoring netCDF variable 'sea_water_salinity' invalid units 'psu'
  warnings.warn(msg.format(msg_name, msg_units))
/home/local/python27_epd/lib/python2.7/site-packages/iris/fileformats/_pyke_rules/compiled_krb/fc_rules_cf_fc.py:1147: UserWarning: Ignoring netCDF variable 'wind_from_direction' invalid units 'deg'
  warnings.warn(msg.format(msg_name, msg_units))
/home/local/python27_epd/lib/python2.7/site-packages/iris/fileformats/_pyke_rules/compiled_krb/fc_rules_cf_fc.py:1147: UserWarning: Ignoring netCDF variable 'sea_water_turbidity' invalid units 'NTU'
  warnings.warn(msg.format(msg_name, msg_units))
 http://sos.maracoos.org/stable/dodsC/stationHRWSTPTH-agg.ncml

In [40]:
for df in obs_df:
    p=df.plot(figsize=(14,6),title=df.name,legend=False)
    setp(p.lines[0],linewidth=4.0,color=[0.7,0.7,0.7],zorder=1)
    legend()
    ylabel('m')



In [41]:
# compute the mean offset of each model relative to the observed data
# (re-plotting with the offset removed is commented out below)
for df in obs_df:
    amean=df[jd_start:jd_now].mean()
    df = df - amean + amean.ix[0]
    print amean.ix[0]-amean
#    df.plot(figsize=(14,6))
#    ylabel('m')


Observed Data           0.000000
NECOFS GOM3 (FVCOM)    -0.212933
dtype: float64
Observed Data           0.000000
NECOFS GOM3 (FVCOM)    -0.229611
ESTOFS Storm Surge M    0.013259
dtype: float64
Observed Data           0.000000
ROMS ESPRESSO Real-T    0.390508
NECOFS GOM3 (FVCOM)    -0.239577
ESTOFS Storm Surge M    0.015061
dtype: float64
Observed Data           0.000000
NECOFS GOM3 (FVCOM)    -0.209016
COAWST Forecast Syst    0.138004
ESTOFS Storm Surge M    0.021994
dtype: float64
Observed Data           0.000000
NECOFS GOM3 (FVCOM)    -0.264400
NYOFS - New York and    0.016119
ESTOFS Storm Surge M    0.032390
dtype: float64
Observed Data           0.000000
ROMS ESPRESSO Real-T    0.374784
NECOFS GOM3 (FVCOM)    -0.256632
NYOFS - New York and    0.069619
dtype: float64
Observed Data           0.000000
NYOFS - New York and    0.063050
COAWST Forecast Syst   -0.013823
dtype: float64
Observed Data           0.000000
ROMS ESPRESSO Real-T    0.432370
NECOFS GOM3 (FVCOM)    -0.193251
NYOFS - New York and    0.105552
COAWST Forecast Syst    0.058025
ESTOFS Storm Surge M    0.037476
dtype: float64
Observed Data           0.000000
ROMS ESPRESSO Real-T    0.400156
NECOFS GOM3 (FVCOM)    -0.200935
DBOFS - Delaware Bay   -0.024733
COAWST Forecast Syst    0.052602
ESTOFS Storm Surge M   -0.029907
dtype: float64
Observed Data           0.000000
DBOFS - Delaware Bay    0.041657
dtype: float64
Observed Data           0.000000
DBOFS - Delaware Bay    0.063437
dtype: float64

In [41]:
a=iris.load(dap_urls[0])


---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
<ipython-input-41-b009bfc01bfc> in <module>()
----> 1 iris.load(dap_urls[0])

[... identical traceback to In [40] above ...]

ValueError: Bounds shape must be compatible with points shape.

In [42]:
dap_urls[0]


Out[42]:
'http://colossus.dl.stevens-tech.edu/thredds/dodsC/latest/Complete_gcmplt.nc'

In [43]:
url='http://colossus.dl.stevens-tech.edu/thredds/dodsC/latest/Complete_gcmplt.nc'
iris.load(url)


---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
<ipython-input-43-12e8d34848f6> in <module>()
      1 url='http://colossus.dl.stevens-tech.edu/thredds/dodsC/latest/Complete_gcmplt.nc'
----> 2 iris.load(url)

[... identical traceback to In [40] above ...]

ValueError: Bounds shape must be compatible with points shape.
