SECOORA sea surface currents time-series notebook

Produce weekly maps and tables for the SECOORA region. Based on the IOOS system-test notebook.


In [1]:
# Record the wall-clock time at which the run begins, so the total
# notebook runtime can be reported at the end.
import time

start_time = time.time()

In [2]:
# Record the software environment for reproducibility of this weekly run.
import os

# The watermark extension prints Python/package versions, machine info,
# and the git hash of the repository (output shown below this cell).
%load_ext watermark
%watermark --githash --machine --python --packages iris,pyoos,owslib

# Path to the shared notebook CSS.
# NOTE(review): `style` is never referenced in this chunk — confirm it is
# used (e.g. loaded via HTML) later in the notebook.
style = os.path.join(os.pardir, os.pardir, 'style.css')


CPython 2.7.9
IPython 2.3.1

iris 1.7.2-DEV
pyoos 0.6.2
owslib 0.8-dev

compiler   : GCC 4.4.7 20120313 (Red Hat 4.4.7-1)
system     : Linux
release    : 3.16.7-7-desktop
machine    : x86_64
processor  : x86_64
CPU cores  : 4
interpreter: 64bit
Git hash   : 8a73c1ba44587ef755d52f9375876c8eec17620c

In [3]:
import pytz
from datetime import datetime, timedelta

from utilities import CF_names


# Time window for the weekly report: the 7 days ending at `stop`
# (e.g.: stop = datetime(2014, 7, 7, 12)).
stop = datetime(2015, 2, 6, 12).replace(tzinfo=pytz.utc)
start = stop - timedelta(days=7)

# Bounding box covering the SECOORA region (NC, SC, GA, FL).
bbox = [-87.40, 24.25, -74.70, 36.70]

# CF standard names to search the catalog for: u, v and
# speed/direction current variables.
currents = CF_names['currents']
name_list = (currents['u'] +
             currents['v'] +
             currents['speed_direction'])

In [4]:
# Each weekly run writes its outputs into a directory named after the
# stop date (YYYY-MM-DD).
run_name = stop.strftime('%Y-%m-%d')

if not os.path.exists(run_name):
    os.makedirs(run_name)

In [5]:
import iris
import pyoos
import owslib

import logging as log
# Re-import the module to drop handlers left over from a previous run of
# this cell: basicConfig is a no-op once the root logger has handlers.
reload(log)

# Helper to render 64-character banner lines, e.g. '**** Run information ****'.
fmt = '{:*^64}'.format
log.captureWarnings(True)
LOG_FILENAME = os.path.join(run_name, 'log.txt')
# `stream` must not be passed together with `filename`; it was redundant here.
log.basicConfig(filename=LOG_FILENAME,
                filemode='w',
                format='%(asctime)s %(levelname)s: %(message)s',
                datefmt='%I:%M:%S',
                level=log.INFO)

log.info(fmt(' Run information '))
log.info('Run date: {:%Y-%m-%d %H:%M:%S}'.format(datetime.utcnow()))
log.info('Download start: {:%Y-%m-%d %H:%M:%S}'.format(start))
log.info('Download stop: {:%Y-%m-%d %H:%M:%S}'.format(stop))
# The adjacent string literals are concatenated at compile time; the
# trailing space before '{2' keeps the 2nd and 3rd values from running
# together in the log ("24.25, -74.70" rather than "24.25,-74.70").
log.info('Bounding box: {0:3.2f}, {1:3.2f}, '
         '{2:3.2f}, {3:3.2f}'.format(*bbox))
log.info(fmt(' Software version '))
log.info('Iris version: {}'.format(iris.__version__))
log.info('owslib version: {}'.format(owslib.__version__))
log.info('pyoos version: {}'.format(pyoos.__version__))

In [6]:
from owslib import fes
from utilities import fes_date_filter

# Keyword arguments shared by every PropertyIsLike text filter below.
kw = dict(wildCard='*',
          escapeChar='\\',
          singleChar='?',
          propertyname='apiso:AnyText')

# Match records whose free text contains any of the CF current names.
likes = [fes.PropertyIsLike(literal=('*%s*' % val), **kw)
         for val in name_list]
or_filt = fes.Or(likes)

# Exclude ROMS Averages and History files.
not_filt = fes.Not([fes.PropertyIsLike(literal='*Averages*', **kw)])

# Combine bbox, time window, name matches and the exclusion into one
# AND filter for the CSW query.
begin, end = fes_date_filter(start, stop)
filter_list = [fes.And([fes.BBox(bbox), begin, end, or_filt, not_filt])]

In [7]:
from owslib.csw import CatalogueServiceWeb

# Query the NGDC geoportal catalog for datasets matching the filters.
endpoint = 'http://www.ngdc.noaa.gov/geoportal/csw'
csw = CatalogueServiceWeb(endpoint, timeout=60)
csw.getrecords2(constraints=filter_list, maxrecords=1000, esn='full')

log.info(fmt(' Catalog information '))
log.info("URL: {}".format(endpoint))
log.info("CSW version: {}".format(csw.version))
log.info("Number of datasets available: {}".format(len(csw.records.keys())))

In [8]:
from utilities import service_urls

# Extract the OPeNDAP and SOS endpoints from the harvested CSW records.
dap_urls = service_urls(csw.records, service='odp:url')
sos_urls = service_urls(csw.records, service='sos:url')

log.info(fmt(' CSW '))
# The record keys were unpacked but never used; iterate values only.
for item in csw.records.values():
    log.info('{}'.format(item.title))

log.info(fmt(' DAP '))
for url in dap_urls:
    log.info('{}.html'.format(url))

log.info(fmt(' SOS '))
for url in sos_urls:
    log.info('{}'.format(url))

Add SECOORA models and observations.


In [9]:
from utilities import titles, fix_url

secoora_models = ['SABGOM', 'USEAST', 'USF_ROMS',
                  'USF_SWAN', 'USF_FVCOM']

for secoora_model in secoora_models:
    if titles[secoora_model] not in dap_urls:
        log.warning('{} not in the NGDC csw'.format(secoora_model))
        dap_urls.append(titles[secoora_model])

# NOTE: USEAST is not archived at the moment!
dap_urls = [fix_url(start, url) if 'SABGOM' in url else url for url in dap_urls]

FIXME: deal with ($u$, $v$) and speed, direction.


In [11]:
from iris.exceptions import CoordinateNotFoundError, ConstraintMismatchError

from utilities import TimeoutException, secoora_buoys, get_cubes

urls = list(secoora_buoys())

# Map buoy name -> first matching cube; buoys that cannot be read are
# skipped with a warning in the log.
buoys = dict()
for url in urls:
    try:
        cubes = get_cubes(url, name_list=name_list,
                          bbox=bbox, time=(start, stop))
        # Buoy name is the file stem of the URL (strip path and '.nc').
        name = url.split('/')[-1].split('.nc')[0]
        buoys[name] = cubes[0]
    except (RuntimeError, ValueError, TimeoutException,
            ConstraintMismatchError, CoordinateNotFoundError) as e:
        log.warning('Cannot get cube for: {}\n{}'.format(url, e))

In [13]:
# Display the CF standard names that were searched for (output below).
name_list


Out[13]:
['surface_eastward_sea_water_velocity',
 'eastward_sea_water_velocity',
 'sea_water_x_velocity',
 'x_sea_water_velocity',
 'eastward_transformed_eulerian_mean_velocity',
 'northward_sea_water_velocity',
 'surface_northward_sea_water_velocity',
 'sea_water_y_velocity',
 'y_sea_water_velocity',
 'northward_transformed_eulerian_mean_velocity',
 'sea_water_speed',
 'direction_of_sea_water_velocity']

In [12]:
# Inspect the harvested buoy cubes (empty in this run — every buoy URL
# failed; see the warnings written to log.txt).
buoys


Out[12]:
{}

In [ ]:
# Common velocity unit (m s-1) for the current fields.
units = iris.unit.Unit('m s-1')