In [11]:
# HIOOS Hawaii regional ROMS assimilation, via the THREDDS OPeNDAP endpoint.
url = 'http://oos.soest.hawaii.edu/thredds/dodsC/hioos/roms_assim/hiig/ROMS_Hawaii_Regional_Ocean_Model_Assimilation_best.ncd'
var = 'sea_water_potential_temperature'
cube = iris.load_cube(url, var)
In [12]:
# Show the cube summary (dimensions, coordinates, attributes).
print(cube)
In [13]:
#cube = iris.load(url,iris.Constraint(name=var.strip()))[0]
# Inspect the vertical coordinate values (check orientation; flip z if necessary).
print(cube.coord(axis='Z').points)
In [7]:
# Inspect the Z coordinate's metadata attributes (e.g. its positive direction).
cube.coord(axis='Z').attributes
Out[7]:
In [4]:
def myplot(slice):
    """Plot a 2-D horizontal slice of an iris cube on a lon/lat grid.

    Expects `slice` to carry X and Y coordinates and a 'time' coordinate;
    relies on pylab names (figure, subplot, pcolormesh, ...) being in the
    interactive namespace.
    NOTE(review): the parameter shadows the builtin `slice`; the name is
    kept so existing keyword callers keep working.
    """
    figure(figsize=(12, 8))
    lat = slice.coord(axis='Y').points
    lon = slice.coord(axis='X').points
    time = slice.coord('time')[0]
    # Aspect ratio compensates for meridian convergence at this latitude.
    subplot(111, aspect=(1.0 / cos(mean(lat) * pi / 180.0)))
    pcolormesh(lon, lat, ma.masked_invalid(slice.data));
    colorbar()
    grid()
    # Prefer the dataset's 'title' attribute; fall back to 'location' when
    # absent.  Was a bare `except:`, which hid every other error here.
    try:
        titl = slice.attributes['title']
    except KeyError:
        titl = slice.attributes['location']
    date = time.units.num2date(time.points)
    date_str = date[0].strftime('%Y-%m-%d %H:%M:%S %Z')
    plt.title('%s: %s: %s' % (titl, slice.long_name, date_str));
In [5]:
# Analysis time: either a fixed datetime, or the current UTC time.
#mytime=dt.datetime(2008,7,28,12) #specified time...
mytime = dt.datetime.utcnow()
In [6]:
# Vertical level index: 0 = surface, -1 = bottom.
lev = 0
In [7]:
# Rutgers ROMS ESPRESSO latest forecast
url = 'http://tds.marine.rutgers.edu/thredds/dodsC/roms/espresso/2013_da/his_Best/ESPRESSO_Real-Time_v2_History_Best_Available_best.ncd'
var = 'sea_water_potential_temperature'
slice = var_lev_date(url=url, var=var, mytime=mytime, lev=lev)
myplot(slice)
In [10]:
# SECOORA/NCSU SABGOM forecast collection
url = 'http://omgsrv1.meas.ncsu.edu:8080/thredds/dodsC/fmrc/sabgom/SABGOM_Forecast_Model_Run_Collection_best.ncd'
var = 'potential temperature'
slice = var_lev_date(url=url, var=var, mytime=mytime, lev=lev)
myplot(slice)
In [11]:
# CENCOOS/UCSC California Current near-real-time aggregation
url = 'http://oceanmodeling.pmc.ucsc.edu:8080/thredds/dodsC/ccsnrt/fmrc/CCSNRT_Aggregation_best.ncd'
var = 'potential temperature'
slice = var_lev_date(url=url, var=var, mytime=mytime, lev=lev)
myplot(slice)
In [7]:
# HIOOS Hawaii regional ROMS assimilation
url = 'http://oos.soest.hawaii.edu/thredds/dodsC/hioos/roms_assim/hiig/ROMS_Hawaii_Regional_Ocean_Model_Assimilation_best.ncd'
var = 'sea_water_potential_temperature'
slice = var_lev_date(url=url, var=var, mytime=mytime, lev=lev)
myplot(slice)
In [ ]:
# Global RTOFS/NCEP (HYCOM region 1); subsample the grid to keep plotting fast.
url = 'http://ecowatch.ncddc.noaa.gov/thredds/dodsC/hycom/hycom_reg1_agg/HYCOM_Region_1_Aggregation_best.ncd'
var = 'sea_water_temperature'
subsample = 2
slice = var_lev_date(url=url, var=var, mytime=mytime, lev=lev, subsample=subsample)
myplot(slice)
In [ ]:
# Show the most recently extracted slice.
print(slice)
In [19]:
# Connect to a CSW catalogue endpoint and report its protocol version.
endpoint = 'http://www.ngdc.noaa.gov/geoportal/csw' # NGDC/IOOS Geoportal
#endpoint = 'http://www.nodc.noaa.gov/geoportal/csw' # NODC/UAF Geoportal: granule level
csw = CatalogueServiceWeb(endpoint, timeout=60)
csw.version
Out[19]:
In [20]:
for oper in csw.operations:
if oper.name == 'GetRecords':
print 'COMMON Queryables:\n',oper.constraints['SupportedCommonQueryables']['values']
print '\nISO Queryables:\n',oper.constraints['SupportedISOQueryables']['values']
In [21]:
# hopefully something like this will be implemented in fes soon
def dateRange(start_date='1900-01-01', stop_date='2100-01-01', constraint='overlaps'):
    """Build a (start, stop) pair of fes temporal filters for a CSW query.

    constraint='overlaps': match records whose time extent intersects the window.
    constraint='within':   match records wholly contained in the window.

    Raises ValueError for any other constraint value.  (The original fell
    through both branches and hit the return with `start`/`stop` unbound,
    producing a confusing UnboundLocalError.)
    """
    if constraint == 'overlaps':
        start = fes.PropertyIsLessThanOrEqualTo(propertyname='apiso:TempExtent_begin', literal=stop_date)
        stop = fes.PropertyIsGreaterThanOrEqualTo(propertyname='apiso:TempExtent_end', literal=start_date)
    elif constraint == 'within':
        start = fes.PropertyIsGreaterThanOrEqualTo(propertyname='apiso:TempExtent_begin', literal=start_date)
        stop = fes.PropertyIsLessThanOrEqualTo(propertyname='apiso:TempExtent_end', literal=stop_date)
    else:
        raise ValueError("constraint must be 'overlaps' or 'within', got %r" % constraint)
    return start, stop
In [22]:
# CSW query parameters: model keywords, time window, bounding box, variable.
search_text = ['roms', 'selfe', 'adcirc', 'ncom', 'hycom', 'fvcom']
start_date = '2012-05-01'
stop_date = '2012-06-01'
box = [-160, 19, -156, 23]  # pacioos
std_name = 'sea_water_potential_temperature'
service_type = 'opendap'
In [66]:
#csw.getrecords2(constraints=[[keywords,serviceType]],maxrecords=5,esn='full')
#csw.getrecords2(constraints=[[anytext,serviceType]],maxrecords=10,esn='full')
# NOTE(review): `filt` is built in a cell not shown here — confirm it exists upstream.
csw.getrecords2(constraints=filt, maxrecords=1000, esn='full')
len(csw.records.keys())
Out[66]:
In [59]:
# NOTE(review): this cell's body was missing in the source (a bare `for`
# header, which is a SyntaxError); printing each record identifier is the
# presumed intent — confirm against the original notebook.
for rec in csw.records:
    print(rec)
In [ ]:
import random

# Spot-check one randomly chosen record and display its references.
choice = random.choice(list(csw.records.keys()))
print(choice)
csw.records[choice].references
In [67]:
# get specific ServiceType URL from records
def service_urls(records, service_string='urn:x-esri:specification:ServiceType:odp:url'):
    """Collect one URL per CSW record whose reference scheme matches.

    records: mapping of record id -> record, where each record carries a
    `.references` list of {'scheme': ..., 'url': ...} dicts.
    Returns the list of matching URLs; records with no match are skipped.
    """
    urls = []
    # .values() instead of the Python-2-only .iteritems(); the key was unused.
    for rec in records.values():
        # First reference whose scheme matches, else the default None.
        url = next((d['url'] for d in rec.references if d['scheme'] == service_string), None)
        if url is not None:
            urls.append(url)
    return urls
In [ ]:
import netCDF4

# Try to open every OPeNDAP endpoint; print the good ones, collect the rest.
#dap_urls = service_urls(csw.records,service_string='urn:x-esri:specification:ServiceType:OPeNDAP')
dap_urls = service_urls(csw.records, service_string='urn:x-esri:specification:ServiceType:odp:url')
bad_urls = []
for url in dap_urls:
    try:
        nc = netCDF4.Dataset(url)
        print(url)
        nc.close()
    except Exception:
        # Was a bare `except:`, which also swallowed KeyboardInterrupt/SystemExit.
        bad_urls.append(url)
In [ ]:
# Endpoints that failed to open.
print(bad_urls)
In [ ]: