Import Python libraries and define some local functions


In [11]:
# OPeNDAP endpoint for the PacIOOS Hawaii ROMS assimilation FMRC "best" aggregation
url='http://oos.soest.hawaii.edu/thredds/dodsC/hioos/roms_assim/hiig/ROMS_Hawaii_Regional_Ocean_Model_Assimilation_best.ncd'
# CF standard_name used to select the variable from the dataset
var='sea_water_potential_temperature'
cube = iris.load_cube(url,var)

In [12]:
print cube


sea_water_potential_temperature / (Celsius) (time: 89; depth: 36; latitude: 195; longitude: 295)
     Dimension coordinates:
          time                                   x          -             -               -
          depth                                  -          x             -               -
          latitude                               -          -             x               -
          longitude                              -          -             -               x
     Auxiliary coordinates:
          forecast_reference_time                x          -             -               -
     Attributes:
          Conventions: Unidata Dataset Discovery v1.0, CF-1.4
          Metadata_Conventions: Unidata Dataset Discovery v1.0, CF-1.4
          Metadata_Link: http://pacioos.org/metadata/roms_hiig_assimilation.html
          _ChunkSize: [  1  18  98 148]
          _CoordSysBuilder: ucar.nc2.dataset.conv.CF1Convention
          acknowledgment: The Pacific Islands Ocean Observing System (PacIOOS) is funded through...
          cdm_data_type: Grid
          comment: Model runs produced by Dr. Brian Powell (powellb@hawaii.edu).
          contributor_name: Jim Potemra
          contributor_role: distributor
          creator_email: powellb@hawaii.edu
          creator_name: Brian Powell
          creator_url: http://www.soest.hawaii.edu/oceanography/faculty/powell.html
          date_created: 2010-06-01
          date_issued: 2010-06-01
          date_modified: 2014-02-10
          featureType: GRID
          geospatial_lat_max: 23.9824
          geospatial_lat_min: 17.0184
          geospatial_lat_units: degrees_north
          geospatial_lon_max: -152.519
          geospatial_lon_min: -163.831
          geospatial_lon_units: degrees_east
          geospatial_vertical_max: 5500.0
          geospatial_vertical_min: 0.25
          geospatial_vertical_positive: down
          geospatial_vertical_units: meters
          history: New grid (02/2014) ;
FMRC Best Dataset
          id: roms_hiig_assimilation
          institution: University of Hawaii
          keywords: Oceans > Ocean Temperature > Potential Temperature, Oceans > Salinity/Density...
          keywords_vocabulary: GCMD Science Keywords
          license: The data may be used and redistributed for free but is not intended for...
          location: Proto fmrc:ROMS_Hawaii_Regional_Ocean_Model_Assimilation
          naming_authority: org.pacioos
          project: Pacific Islands Ocean Observing System (PacIOOS) (http://pacioos.org)
          publisher_email: jimp@hawaii.edu
          publisher_name: Pacific Islands Ocean Observing System (PacIOOS)
          publisher_url: http://pacioos.org
          references: http://pacioos.org/focus/modeling/roms.php, http://myroms.org, http://...
          source: Regional Ocean Modeling System (ROMS), http://myroms.org
          standard_name_vocabulary: CF-1.4
          summary: Regional Ocean Modeling System (ROMS) 3-day, 3-hourly data assimilating...
          time_coverage_resolution: PT3H
          time_coverage_start: 2014-02-04T00:00:00Z
          title: Regional Ocean Modeling System (ROMS): Main Hawaiian Islands: Data Ass...

In [13]:
#cube = iris.load(url,iris.Constraint(name=var.strip()))[0]
# flip z if necessary
print cube.coord(axis='Z').points


[  2.50000000e-01   1.00000000e+00   2.00000000e+00   5.00000000e+00
   1.00000000e+01   2.00000000e+01   3.00000000e+01   5.00000000e+01
   7.50000000e+01   1.00000000e+02   1.25000000e+02   1.50000000e+02
   2.00000000e+02   2.50000000e+02   3.00000000e+02   4.00000000e+02
   5.00000000e+02   6.00000000e+02   7.00000000e+02   8.00000000e+02
   9.00000000e+02   1.00000000e+03   1.10000000e+03   1.20000000e+03
   1.30000000e+03   1.40000000e+03   1.50000000e+03   1.75000000e+03
   2.00000000e+03   2.50000000e+03   3.00000000e+03   3.50000000e+03
   4.00000000e+03   4.50000000e+03   5.00000000e+03   5.50000000e+03]

In [7]:
cube.coord(axis='Z').attributes


Out[7]:
{'_CoordinateAxisType': 'Height',
 '_CoordinateZisPositive': 'down',
 'positive': 'down'}

In [4]:
def myplot(slice):
    """Plot one 2D (lat/lon) slice of an iris cube with pcolormesh.

    Uses pylab-style names (figure, subplot, pcolormesh, colorbar, grid,
    ma, cos, mean, pi) imported elsewhere in the notebook.
    NOTE: the parameter name shadows the builtin ``slice``; kept so the
    existing call sites are unchanged.
    """
    figure(figsize=(12,8))
    lat=slice.coord(axis='Y').points
    lon=slice.coord(axis='X').points
    time=slice.coord('time')[0]
    # shrink the x-axis so a degree of longitude and a degree of latitude
    # render with roughly equal ground distance at this latitude
    subplot(111,aspect=(1.0/cos(mean(lat)*pi/180.0)))
    pcolormesh(lon,lat,ma.masked_invalid(slice.data));
    colorbar()
    grid()
    # some datasets label themselves with 'location' instead of 'title';
    # catch only the missing-key case (a bare except would hide real errors)
    try:
        titl=slice.attributes['title']
    except KeyError:
        titl=slice.attributes['location']
    date=time.units.num2date(time.points)
    date_str=date[0].strftime('%Y-%m-%d %H:%M:%S %Z')
    plt.title('%s: %s: %s' % (titl,slice.long_name,date_str));

Specify Time


In [5]:
#mytime=dt.datetime(2008,7,28,12)  #specified time...
mytime=dt.datetime.utcnow()      # .... or now

Specify Vertical Level to Plot


In [6]:
# level 0=surface, -1=bottom
lev = 0

Specify some specific DAP URLS


In [7]:
#Rutgers ROMS Espresso latest forecast
url='http://tds.marine.rutgers.edu/thredds/dodsC/roms/espresso/2013_da/his_Best/ESPRESSO_Real-Time_v2_History_Best_Available_best.ncd'
var = 'sea_water_potential_temperature'
# var_lev_date is defined elsewhere in the notebook (not visible here);
# presumably it extracts a single level/time slice from the DAP URL — confirm.
# NOTE(review): `slice` shadows the Python builtin of the same name.
slice=var_lev_date(url=url,var=var, mytime=mytime, lev=lev)
myplot(slice)


slice retrieved in 3.697080 seconds
/home/local/python27_epd/lib/python2.7/site-packages/iris/fileformats/_pyke_rules/compiled_krb/fc_rules_cf_fc.py:1216: UserWarning: Gracefully filling 'time' dimension coordinate masked points
  warnings.warn(msg.format(str(cf_coord_var.cf_name)))

In [10]:
# SECOORA/NCSU
url='http://omgsrv1.meas.ncsu.edu:8080/thredds/dodsC/fmrc/sabgom/SABGOM_Forecast_Model_Run_Collection_best.ncd'
var='potential temperature'
slice=var_lev_date(url=url,var=var, mytime=mytime, lev=lev)
myplot(slice)


slice retrieved in 5.939326 seconds

In [11]:
# CENCOOS/UCSC
url='http://oceanmodeling.pmc.ucsc.edu:8080/thredds/dodsC/ccsnrt/fmrc/CCSNRT_Aggregation_best.ncd'
var='potential temperature'
slice=var_lev_date(url=url,var=var, mytime=mytime, lev=lev)
myplot(slice)


slice retrieved in 6.098920 seconds

In [7]:
# HIOOS
url='http://oos.soest.hawaii.edu/thredds/dodsC/hioos/roms_assim/hiig/ROMS_Hawaii_Regional_Ocean_Model_Assimilation_best.ncd'
var='sea_water_potential_temperature'
slice=var_lev_date(url=url,var=var, mytime=mytime, lev=lev)
myplot(slice)


slice retrieved in 1.723796 seconds
/home/local/python27_epd/lib/python2.7/site-packages/iris/fileformats/_pyke_rules/compiled_krb/fc_rules_cf_fc.py:1216: UserWarning: Gracefully filling 'time' dimension coordinate masked points
  warnings.warn(msg.format(str(cf_coord_var.cf_name)))

In [ ]:
# Global RTOFS/NCEP
url='http://ecowatch.ncddc.noaa.gov/thredds/dodsC/hycom/hycom_reg1_agg/HYCOM_Region_1_Aggregation_best.ncd'
var='sea_water_temperature' 
subsample=2
slice=var_lev_date(url=url,var=var, mytime=mytime, lev=lev, subsample=subsample)
myplot(slice)

In [ ]:
print slice

Now, instead of specifying model URLs directly, find them via Catalog Service for the Web (CSW)


In [19]:
endpoint = 'http://www.ngdc.noaa.gov/geoportal/csw' #  NGDC/IOOS Geoportal
#endpoint = 'http://www.nodc.noaa.gov/geoportal/csw'   # NODC/UAF Geoportal: granule level
csw = CatalogueServiceWeb(endpoint,timeout=60)
csw.version


Out[19]:
'2.0.2'

In [20]:
for oper in csw.operations:
    if oper.name == 'GetRecords':
        print 'COMMON Queryables:\n',oper.constraints['SupportedCommonQueryables']['values']
        print '\nISO Queryables:\n',oper.constraints['SupportedISOQueryables']['values']


COMMON Queryables:
['Subject', 'Title', 'Abstract', 'AnyText', 'Format', 'Identifier', 'Modified', 'Type', 'BoundingBox']

ISO Queryables:
['apiso:Subject', 'apiso:Title', 'apiso:Abstract', 'apiso:AnyText', 'apiso:Format', 'apiso:Identifier', 'apiso:Modified', 'apiso:Type', 'apiso:BoundingBox', 'apiso:CRS.Authority', 'apiso:CRS.ID', 'apiso:CRS.Version', 'apiso:RevisionDate', 'apiso:AlternateTitle', 'apiso:CreationDate', 'apiso:PublicationDate', 'apiso:OrganizationName', 'apiso:HasSecurityConstraints', 'apiso:Language', 'apiso:ResourceIdentifier', 'apiso:ParentIdentifier', 'apiso:KeywordType', 'apiso:TopicCategory', 'apiso:ResourceLanguage', 'apiso:GeographicDescriptionCode', 'apiso:Denominator', 'apiso:DistanceValue', 'apiso:DistanceUOM', 'apiso:TempExtent_begin', 'apiso:TempExtent_end', 'apiso:ServiceType', 'apiso:ServiceTypeVersion', 'apiso:Operation', 'apiso:OperatesOn', 'apiso:OperatesOnIdentifier', 'apiso:OperatesOnName', 'apiso:CouplingType']

In [21]:
# hopefully something like this will be implemented in fes soon
def dateRange(start_date='1900-01-01',stop_date='2100-01-01',constraint='overlaps'):
    """Build a pair of fes temporal filters for a CSW query.

    constraint='overlaps': match records whose time extent overlaps
        [start_date, stop_date].
    constraint='within': match records whose time extent lies entirely
        within [start_date, stop_date].

    Returns (start_filter, stop_filter).
    Raises ValueError for an unrecognized constraint (previously this fell
    through and produced a confusing NameError at the return statement).
    """
    if constraint == 'overlaps':
        start = fes.PropertyIsLessThanOrEqualTo(propertyname='apiso:TempExtent_begin', literal=stop_date)
        stop = fes.PropertyIsGreaterThanOrEqualTo(propertyname='apiso:TempExtent_end', literal=start_date)
    elif constraint == 'within':
        start = fes.PropertyIsGreaterThanOrEqualTo(propertyname='apiso:TempExtent_begin', literal=start_date)
        stop = fes.PropertyIsLessThanOrEqualTo(propertyname='apiso:TempExtent_end', literal=stop_date)
    else:
        raise ValueError("constraint must be 'overlaps' or 'within', got %r" % (constraint,))
    return start,stop

In [22]:
search_text = ['roms','selfe','adcirc','ncom','hycom','fvcom']
start_date='2012-05-01'
stop_date='2012-06-01'
box=[-160, 19, -156, 23]   # pacioos
std_name='sea_water_potential_temperature'
service_type = 'opendap'
start,stop = dateRange(start_date,stop_date)
bbox = fes.BBox(box)
# One PropertyIsLike filter per model name; owslib ORs a flat constraint list.
# (This line was garbled in the original cell: several statements were fused
# onto one line and `serviceType =[fes.PropertyIsLike(...)` had an unclosed
# bracket.  Reconstructed below — TODO confirm against the original notebook.)
filt=[]
for val in search_text:
    filt.append(fes.PropertyIsLike(propertyname='apiso:anyText', literal=val))
#keywords = fes.PropertyIsLike(propertyname='apiso:Keywords', literal=std_name)
keywords = fes.PropertyIsEqualTo(propertyname='apiso:Keywords', literal=std_name)
serviceType = fes.PropertyIsLike(propertyname='apiso:ServiceType', literal=('*%s*' % service_type))
filt.append(serviceType)

In [66]:
#csw.getrecords2(constraints=[[keywords,serviceType]],maxrecords=5,esn='full')
#csw.getrecords2(constraints=[[anytext,serviceType]],maxrecords=10,esn='full')
csw.getrecords2(constraints=filt,maxrecords=1000,esn='full')

len(csw.records.keys())


Out[66]:
164

In [59]:
for rec in csw.records:


[<owslib.fes.PropertyIsLike object at 0x5a7f3d0>, <owslib.fes.PropertyIsLike object at 0x5a7f350>, <owslib.fes.PropertyIsLike object at 0x5a7f450>, <owslib.fes.PropertyIsLike object at 0x5a7f490>, <owslib.fes.PropertyIsLike object at 0x5a7f4d0>, <owslib.fes.PropertyIsLike object at 0x5a7f510>]

In [ ]:
import random
choice=random.choice(list(csw.records.keys()))
print choice
csw.records[choice].references

In [67]:
# get specific ServiceType URL from records
def service_urls(records,service_string='urn:x-esri:specification:ServiceType:odp:url'):
    urls=[]
    for key,rec in records.iteritems():
        #create a generator object, and iterate through it until the match is found
        #if not found, gets the default value (here "none")
        url = next((d['url'] for d in rec.references if d['scheme'] == service_string), None)
        if url is not None:
            urls.append(url)
    return urls

In [ ]:
import netCDF4
#dap_urls = service_urls(csw.records,service_string='urn:x-esri:specification:ServiceType:OPeNDAP')
dap_urls = service_urls(csw.records,service_string='urn:x-esri:specification:ServiceType:odp:url')
bad_urls=[]
for url in dap_urls:
        try:
            nc = netCDF4.Dataset(url)
            print url
            nc.close()
        except:
            bad_urls.append(url)

In [ ]:
print bad_urls

In [ ]: