In [ ]:
# Embed plots in web page. No floating window.
%matplotlib inline
# svg increases resolution when you zoom in (Ctrl-+); png does not.
# Use svg format (scalable vector graphics) for inline plots, not png.
%config InlineBackend.figure_formats=['svg']

In [ ]:
# Copied from https://www.xormedia.com/natural-sort-order-with-zero-padding/

from sys import maxint
import re

# optional '-' to support negative numbers
_num_re = re.compile(r'-?\d+')
# number of chars in the largest possible int
_maxint_digits = len(str(maxint))
# format for zero padding positive integers
_zero_pad_int_fmt = '{{0:0{0}d}}'.format(_maxint_digits)
# '/' is the ASCII character just before '0', so zero-padded negatives sort before positives
_zero_pad_neg_int_fmt = '/{{0:0{0}d}}'.format(_maxint_digits)
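# e.g. on a 64-bit build (19-digit maxint) these become '{0:019d}' and '/{0:019d}'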


def _zero_pad(match):
    n = int(match.group(0))
    # if n is negative, we'll use the negative format and flip the number using
    # maxint so that -2 comes before -1, ...
    return _zero_pad_int_fmt.format(n) \
        if n > -1 else _zero_pad_neg_int_fmt.format(n + maxint)

def zero_pad_numbers(s):
    return _num_re.sub(_zero_pad, s)
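
In [ ]:
# Quick, illustrative check of zero_pad_numbers as a natural-sort key:
# a plain string sort puts 'f10' before 'f2'; padding the embedded numbers fixes that.
names = ['f10.json', 'f2.json', 'f-1.json']
print sorted(names)                        # ['f-1.json', 'f10.json', 'f2.json']
print sorted(names, key=zero_pad_numbers)  # ['f-1.json', 'f2.json', 'f10.json']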

In [ ]:
import datetime as dt
from datetime import datetime, timedelta
import sys
import pytz
import pandas as pd

def spc_hailwind_filename(hailwind, year):
    name = '/glade/u/home/ahijevyc/share/' + hailwind + '/'
    if hailwind == 'torn':
        # all tornado reports live in a single file
        return name + 'Actual_tornadoes.csv'
    if year >= 2008:
        return name + str(year) + "_" + hailwind + ".csv"
    if 2005 <= year <= 2007:
        return name + "2005-2007_" + hailwind + ".csv"
    # no wind/hail csv available before 2005
    raise ValueError("no %s file for year %d" % (hailwind, year))
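
# Illustrative expectations (paths follow the share/ layout above):
#   spc_hailwind_filename("torn", 2010) -> .../torn/Actual_tornadoes.csv
#   spc_hailwind_filename("hail", 2006) -> .../hail/2005-2007_hail.csv
#   spc_hailwind_filename("wind", 2011) -> .../wind/2011_wind.csv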


def filter_rpts(rpts, start, end):
    # SPC tz code 9 = GMT, 3 = CST; shift GMT report times back 6 h to CST
    # so the whole table is in one time zone.
    gmt = rpts['tz'] == 9
    if gmt.any():
        rpts.loc[gmt, 'yr_mo_dy_time'] = rpts.loc[gmt, 'yr_mo_dy_time'] - dt.timedelta(hours=6)
        rpts.loc[gmt, 'tz'] = 3

    if any(rpts['tz'] != 3):
        print start, end
        print rpts[['om','yr_mo_dy_time','tz']][rpts['tz'] != 3]
        print "WARNING - proceeding with program. Wrote to SPC Mar 22 2017 about fixing these lines"

    times = rpts['yr_mo_dy_time'] + dt.timedelta(hours=6) # convert from CST to UTC
    correct_date = (times >= start) & (times < end)
    return rpts[correct_date]


def storm_rpts(rpt_type, start, end):

    possible_types = ["wind", "hail", "torn"]
    if rpt_type not in possible_types:
        print "unknown storm report type:", rpt_type
        print "must be one of", possible_types
        sys.exit(2)

    # csv format described in http://www.spc.noaa.gov/wcm/data/SPC_severe_database_description.pdf
    # SPC storm report files downloaded from http://www.spc.noaa.gov/wcm/#data to
    # yellowstone: ~ahijevyc/share/ March 2017.
    # The columns are identical except for the magnitude column:
    # 'mag' (wind speed), 'sz' (hail size), or 'f' (F-scale).
    mag_col = {"wind": "mag", "hail": "sz", "torn": "f"}[rpt_type]
    cols = ['om','yr','mo','dy','date','time','tz', 'st','stf','stn',mag_col,'inj',
            'fat','loss','closs','slat','slon','elat','elon','len','wid','ns',
            'sn','sg','f1','f2','f3','f4','mt']

    # note: uses the global run_date set in the plotting loop below
    rpts_file = spc_hailwind_filename(rpt_type, run_date.year)
    rpts = pd.read_csv(rpts_file, header=None, names=cols, parse_dates=[['yr','mo','dy','time']],
                       infer_datetime_format=True)

    rpts = filter_rpts(rpts, start, end)
    return rpts
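
In [ ]:
# Minimal, self-contained illustration of filter_rpts with a synthetic report
# table (columns follow the SPC convention above; tz 9 = GMT, 3 = CST).
demo = pd.DataFrame({'om': [1, 2, 3],
                     'tz': [3, 9, 3],
                     'yr_mo_dy_time': [datetime(2008, 2, 25,  7),   # CST, in window
                                       datetime(2008, 2, 25, 18),   # GMT, in window
                                       datetime(2008, 2, 26, 12)]}) # CST, after window
start = datetime(2008, 2, 25, 12)
print filter_rpts(demo, start, start + dt.timedelta(hours=24))
# storm_rpts() itself needs the SPC csv files on glade and the global run_date
# set in the plotting loop below, so it is not exercised here.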

In [ ]:
import matplotlib.pyplot as plt
from matplotlib.patches import Polygon, PathPatch, Patch
from matplotlib.collections import PatchCollection
from mpl_toolkits.basemap import Basemap
from glob import glob
from itertools import cycle
import sys, json, os
import numpy as np
from hagelslag.data import ModelOutput
from hagelslag.processing import read_geojson
from hagelslag.util.make_proj_grids import read_ncar_map_file, make_proj_grids
from netCDF4 import Dataset
from mysavfig import mysavfig
import numbers
plt.rcParams.update({'mathtext.default': 'regular'})

def load_json_tracks(path, run_date, member, model_format):
    storm_tracks = []
    search_str = path + run_date.strftime("%Y%m%d/") + member + "/" + model_format + "*.json"

    model_files = sorted(glob(search_str), key=zero_pad_numbers)
    if not model_files:
        print "in load_json_tracks(), no model_files found."
        print "run_date=",run_date,"member=",member,"model_format=",model_format
        print "search str=",search_str


    old_mtime = 0
    for model_file in model_files:
        # Check if modification time is earlier than previous file. 
        # This indicates an older run still cluttering the directory.
        mtime = os.path.getmtime(model_file)
        if mtime < old_mtime:
            print model_file, mtime, "was modified before", old_model_file, old_mtime
            print "an older run may be cluttering "+search_str
            print "removing", model_file
            os.remove(model_file)
            continue
        storm_tracks.append(read_geojson(model_file))
        old_mtime = mtime
        old_model_file = model_file
    return storm_tracks
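
# Illustrative call (path layout matches json_path in the plotting loop below):
#   tracks = load_json_tracks("/glade/scratch/ahijevyc/OBJECT_TRACK/track_data_VSE_json_MAX_UPDRAFT_HELICITY_87/",
#                             datetime(2008, 2, 25, 12, tzinfo=pytz.UTC), "1km_on_3km_pbl1", "VSE")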

def add_basemap(ax,drawcounties=True):
    proj_dict, grid_dict = read_ncar_map_file("/glade/p/work/ahijevyc/hagelslag/mapfiles/VSE.txt")
    m = Basemap(resolution="i", # "c"< "l"< "i" <"h"
            llcrnrlon=grid_dict["sw_lon"],
            urcrnrlon=grid_dict["ne_lon"],
            llcrnrlat=grid_dict["sw_lat"],
            urcrnrlat=grid_dict["ne_lat"],
            rsphere = (proj_dict["a"], proj_dict["b"]), 
            projection=proj_dict["proj"], lat_2=proj_dict["lat_2"], lat_1=proj_dict["lat_1"],
             lat_0=proj_dict["lat_0"], lon_0=proj_dict["lon_0"], ax=ax)
    if drawcounties:
        m.drawcounties(linewidth=0.05)
    m.drawstates()
    m.drawcountries()
    m.drawcoastlines(linewidth=0.5)
    m.drawparallels(np.arange(0.,81.,2.),labels=[True,False,False,False],linewidth=0.4)
    meridians = np.arange(0.,351.,2.)
    m.drawmeridians(meridians,labels=[False,False,False,True],linewidth=0.4)
    return m
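
# Illustrative use (requires the VSE.txt map file on glade):
#   fig, ax = plt.subplots(figsize=(8.5, 5))
#   m = add_basemap(ax, drawcounties=False)
#   x, y = m(-97.5, 35.0)   # the Basemap instance converts lon/lat to projection meters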


def output_netcdf_file(filename, mask_grid, proj_dict, grid_dict):

    if os.path.exists(filename):
        os.remove(filename)
        print "removed old",filename
    basedir = os.path.dirname(filename)
    if not os.path.exists(basedir):
        print "trying to write",os.path.basename(filename),"but directory",basedir,"must be created first"
        os.makedirs(basedir)
    try:
        # clobber=False is redundant here since any old file was removed above;
        # set it to True only if you want to recreate the terrain-anchored object file.
        out_set = Dataset(filename, "w", clobber=False)
    except Exception:
        print "not creating", filename
        return
    print "creating", filename
    dx = grid_dict['dx']
    out_set.createDimension("y", mask_grid.shape[0])
    out_set.createDimension("x", mask_grid.shape[1])
    out_set.set_auto_mask(True)
    var = out_set.createVariable("count", 'u8', ("y", "x"), zlib=True)
    var[:] = mask_grid
    var.long_name = "object count"
    x = out_set.createVariable("x", 'double', ("x",))
    x[:] = dx * np.arange(mask_grid.shape[1])
    x.units = "m"
    x.long_name = "x coordinate of projection"
    y = out_set.createVariable("y", 'double', ("y",))
    y[:] = dx * np.arange(mask_grid.shape[0])
    y.units = "m"
    y.long_name = "y coordinate of projection"

    latitude = out_set.createVariable("latitude", 'double', ("y","x"), zlib=True)
    latitude[:] = grid_dict["lat"]
    latitude.units = "degrees_north"
    latitude.long_name = "latitude"
    longitude = out_set.createVariable("longitude", 'double', ("y","x"), zlib=True)
    longitude[:] = grid_dict["lon"]
    longitude.units = "degrees_east"
    longitude.long_name = "longitude"
    
    
    for k, v in proj_dict.items():
        setattr(out_set, k, v)
    for k, v in grid_dict.items():
        if k in ["i", "j", "lon", "lat", "x", "y"]:
            #print "skipping",k,v
            continue
        try:
            #print 'setting',k,v
            setattr(out_set, k, v)
        except Exception:
            # some grid_dict values are not valid netCDF attribute types
            pass
    out_set.close()
    print "created",filename

In [ ]:
members = ["3km_pbl1","1km_on_3km_pbl1"]
#members = ["1km_on_3km_pbl2"]
model_format = "VSE"
ensemble_name = "VSE"

ncf = Dataset("/glade/p/work/ahijevyc/hagelslag/mapfiles/"+model_format+"_mask.nc")
landmask = ncf.variables["usa_mask"][:]
ncf.close()
#ncea -y ttl /glade/scratch/ahijevyc/OBJECT_TRACK/track_data_VSE_json_WSPD10MAX_15/2*/*3km_pbl1/*.object_count.nc terrain_obj_file
terrain_obj_file = "/glade/p/work/ahijevyc/hagelslag/out/u.nc"
ncf = Dataset(terrain_obj_file)
nobj = ncf.variables["count"][:]
ncf.close()

model_path = "/glade/scratch/ahijevyc/"+ensemble_name+"/"
odir = "/glade/p/work/ahijevyc/hagelslag/out/"
#fields = ["MAX_UPDRAFT_HELICITY_25"] * len(members)
fields = ["MAX_UPDRAFT_HELICITY_87"]
#fields = ["UP_HELI_MAX03_22"]#,"UP_HELI_MAX03_77"]
#fields = ["UP_HELI_MAX03_25"] * 2
#fields = ["WSPD10MAX_15"] * 2
#fields = "HAIL2D"
#fields = ["REFL_1KM_AGL_40"] * 2
if len(members) != len(fields):
    print "# members not equal to # fields", members, fields
    sys.exit(2)
pixel_thresh = 1
SPC_rpts = False

dates = [datetime(2005, 1,13,12,tzinfo=pytz.UTC),
         datetime(2005,12,28,12,tzinfo=pytz.UTC),
         datetime(2006, 1,13,12,tzinfo=pytz.UTC),
         datetime(2007, 1, 4,12,tzinfo=pytz.UTC),
         datetime(2007, 1, 7,12,tzinfo=pytz.UTC),
         datetime(2007, 2,12,12,tzinfo=pytz.UTC),
         datetime(2007, 2,13,12,tzinfo=pytz.UTC),
         datetime(2007, 2,24,12,tzinfo=pytz.UTC),
         datetime(2008, 2,12,12,tzinfo=pytz.UTC),
         datetime(2008, 2,16,12,tzinfo=pytz.UTC),
         datetime(2008, 2,17,12,tzinfo=pytz.UTC),
         datetime(2008, 2,25,12,tzinfo=pytz.UTC),
         datetime(2008,12, 9,12,tzinfo=pytz.UTC),
         datetime(2009, 2,18,12,tzinfo=pytz.UTC),
         datetime(2009,12,24,12,tzinfo=pytz.UTC),
         datetime(2010, 1,20,12,tzinfo=pytz.UTC),
         datetime(2010,12,31,12,tzinfo=pytz.UTC),
         datetime(2011, 2,28,12,tzinfo=pytz.UTC),
         datetime(2011,12,22,12,tzinfo=pytz.UTC),
         datetime(2012, 1,22,12,tzinfo=pytz.UTC),
         datetime(2012, 1,25,12,tzinfo=pytz.UTC)]

#dates = [ datetime(2008,2,25,12,tzinfo=pytz.UTC) ]
for run_date in dates:

    rows = len(members)+1
    fig, ax = plt.subplots(rows, figsize=(8.5,5*rows))
    color_list = cycle(["violet", "green", "cyan", "blue", "purple", "darkgreen", "teal", "royalblue"])

    windrpts = storm_rpts("wind", run_date, run_date + dt.timedelta(hours=24))
    hailrpts = storm_rpts("hail", run_date, run_date + dt.timedelta(hours=24))
    tornrpts = storm_rpts("torn", run_date, run_date + dt.timedelta(hours=24))

    wlons, wlats = windrpts['slon'].values, windrpts['slat'].values
    hlons, hlats = hailrpts['slon'].values, hailrpts['slat'].values
    tlons, tlats = tornrpts['slon'].values, tornrpts['slat'].values

    m = add_basemap(ax[-1])
    final_legend_list = []
    iax = -1
    
    for member, field in zip(members,fields):
        json_path = "/glade/scratch/ahijevyc/OBJECT_TRACK/track_data_"+ensemble_name+"_json_"+field+"/"

        terraintoo, watertoo = True, True # plot objects over water too
        if "WSPD10MAX" in field:
            watertoo = False # don't plot wind objects over water.
            terraintoo = False # don't plot wind objects anchored to terrain.

        iax = iax+1
        m = add_basemap(ax[iax])

        centroids = []
        object_sizes = []
        model_grid = ModelOutput(ensemble_name,
                                 member, run_date, field, run_date, 
                                 run_date+timedelta(hours=24),
                                 model_path,
                                 single_step=True)

        model_map_file="/glade/p/work/ahijevyc/hagelslag/mapfiles/VSE.txt"
        model_grid.load_map_info(model_map_file)
        model_grid.data = []

        # Initialize nobj_grid with zeros - counts number of times a point is occupied by an object
        # over the course of a model run. One netCDF file is produced for each date and each member.
        # Later the output netCDF files can be merged to identify points with objects most
        # frequently.  These are likely objects anchored to terrain.
        nobj_grid = np.zeros(model_grid.lat.shape, dtype=int)
        nobj_nc = json_path + run_date.strftime("%Y%m%d/") + member + run_date.strftime('/%Y%m%d%H.')+"object_count.nc"

        storm_tracks = load_json_tracks(json_path, run_date, member, model_format)
        if not storm_tracks:
            # Used to indicate a bug, but this happens legitimately if no objects are found (e.g. 20080225 no 0-3km UH>25 m2/s2)
            pass
        patches = []
        for track in storm_tracks:
            previous_centroid = None
            for time, mask, i, j in zip(track.times, track.masks, track.i, track.j):
                osize = track.size(time)
                if osize < pixel_thresh:
                    print 'skipping small object, %d pixels'% osize
                    continue

                polygon_xy = track.boundary_polygon(time)
                
                # Get ij indices covered by object and increment nobj_grid by 1.
                ni = (mask * i)[np.nonzero(mask)]
                nj = (mask * j)[np.nonzero(mask)]
                nobj_grid[ni,nj] = nobj_grid[ni,nj] + 1

                if not watertoo and np.mean(landmask[ni,nj]) < 0.5:
                    tmp = track.center_of_mass(time)
                    print "object >50% over water. ignoring", model_grid.proj(*tmp, inverse=True)
                    continue
                average_obj_frequency = np.mean(nobj[ni,nj])
                if not terraintoo and average_obj_frequency > 50:
                    tmp = track.center_of_mass(time)
                    print "average obj frequency", average_obj_frequency
                    print "may be anchored to terrain. ignoring", model_grid.proj(*tmp, inverse=True)
                    continue
                    
                object_sizes.append(osize)
                centroid = track.center_of_mass(time)
                # convert centroid from xkm ykm to lon lat
                centroid = model_grid.proj(*centroid, inverse=True)
                centroids.append(centroid)

                # Convert boundary from projection x/y to lon/lat, expand the
                # 2xN array into two arguments (asterisk), feed to the m() map
                # transformation, and transpose to Nx2 for Polygon.
                # boundary_polygon sometimes returns 2 empty arrays (4-pixel
                # objects?); don't plot in that case.
                if polygon_xy[0].size > 0:
                    # use these values in xkm and ykm.
                    # Use model_grid.proj object to go to lonlat
                    polygon_lonlat = model_grid.proj(*polygon_xy, inverse=True)
                    patches.append(Polygon(np.transpose(m(*polygon_lonlat)), closed=True, fill=True))
                else:
                    print "no boundary polygon. size", track.size(time), "mask", mask

                # label storm objects with hour
                ax[iax].annotate(str(time), xy=m(*centroid), fontsize=5.1)
                # Draw line from previous object to this one.
                if previous_centroid is not None:
                    m.drawgreatcircle(previous_centroid[0], previous_centroid[1], centroid[0], centroid[1],color="black")
                previous_centroid = centroid

        output_netcdf_file(nobj_nc,nobj_grid,{"pixel_thresh":pixel_thresh},model_grid.__dict__)
        
        area = np.sum(object_sizes)*(model_grid.dx/1000)**2
        color = next(color_list)
        ax[iax].set_title(member + " " + run_date.strftime("%Y%m%d%H")+ " " + str(area) + " km" + r'$^2$' +"\n"+
                          field+ r'$ \geq$' +"%d pixels"%pixel_thresh + " watertoo: " + str(watertoo) +
                          " terraintoo: " + str(terraintoo))
        alpha = 0.38
        pc = PatchCollection(patches, color=color, alpha=alpha)
        ax[iax].add_collection(pc) # Plot individual member

        ax[iax].legend(handles=[Patch(color=color,alpha=alpha,label=member)],loc="upper left",numpoints=1,fontsize=9)
        # A matplotlib artist can belong to only one Axes, so "pc" (already added
        # above) cannot be reused here; a fresh PatchCollection is required.
        # Tack on individual member to final, composite plot.
        # First member is opaque; successive members are more transparent.
        alpha = 1-np.sqrt(float(iax)/len(members))
        ax[-1].add_collection(PatchCollection(patches, color=color, alpha=alpha))
        final_legend_list.append(Patch(color=color,alpha=alpha,label=member))

        if len(centroids) > 0:
            # Get area-weighted centroid for all objects
            overall_centroid = np.average(centroids, weights = object_sizes, axis=0)
            m.plot(*overall_centroid, marker='x', color=color, markersize=40, latlon=True)
            centroid_on_final_panel, = m.plot(*overall_centroid, marker='x', color=color, 
                                              label = member + ' centroid', markersize=40, latlon=True, 
                                              linestyle='None', ax=ax[-1])
            final_legend_list.append(centroid_on_final_panel)
            
    
    ax[-1].set_title(','.join(members)+ " " + run_date.strftime("%Y%m%d%H")+"\n"+'_'.join(fields)+ r'$ \geq$'+ "%d pixels"%pixel_thresh)

    norpts_str = ".norpts"
    if SPC_rpts:
        wrpts, = m.plot(wlons, wlats, 'bo', markersize=2.5, marker="s", latlon=True, label="Wind Report", alpha=0.5, ax=ax[-1])
        hrpts, = m.plot(hlons, hlats, 'go', markersize=2.8, marker="^", latlon=True, label="Hail Report", alpha=0.5, ax=ax[-1])
        trpts, = m.plot(tlons, tlats, 'ro', markersize=3.0, marker="v", latlon=True, label="Tornado Rpt", alpha=0.5, ax=ax[-1])
        final_legend_list.extend([wrpts, hrpts, trpts])
        norpts_str = ""


    ax[-1].legend(handles=final_legend_list,loc="upper left",numpoints=1,fontsize=7.,framealpha=0.5)
    ofile = odir + '_'.join(members) + "_" + run_date.strftime("%Y%m%d%H_") + '_'.join(fields) + norpts_str + ".png" # multi-row plot: 1 per member, plus 1 all-member composite.
    ret = mysavfig(ofile)

In [ ]:
# Map the number of objects on each grid point. Assumes this has already been made with ncea. 
# Reads 'history' attribute to get number of files. Assumes each file is sum of 24 times.


basedir = "/glade/scratch/ahijevyc/OBJECT_TRACK/"
obj_count_file = basedir + "v.nc"
obj_count_file = basedir + "track_data_VSE_json_MAX_UPDRAFT_HELICITY_87/20070224/1km_on_3km_pbl1/2007022412.object_count.nc"
obj_count_file = basedir + "track_data_VSE_json_UP_HELI_MAX03_77/20070224/1km_on_3km_pbl1/2007022412.object_count.nc"

ncf = Dataset(obj_count_file)
nobj = ncf.variables["count"][:]
if 'history' in ncf.ncattrs():
    history = ncf.getncattr('history')
    print "reading global history attribute"
    # (ncf stays open; pixel_thresh is read from it below)


    # Assumes global 'history' attribute is written by NCO tool.
    #// global attributes:
    #		:history = "Mon Mar 27 15:53:03 2017: ncea -y ttl track_data_VSE_json_MAX_UPDRAFT_HELICITY_25/20050113/3km_pbl7/2005011312.object_count.nc track_data_VSE_json_MAX_UPDRAFT_HELICITY_25/20051228/3km_pbl7/2005122812.object_count.nc track_data_VSE_json_MAX_UPDRAFT_HELICITY_25/20060113/3km_pbl7/2006011312.object_count.nc track_data_VSE_json_MAX_UPDRAFT_HELICITY_25/20070104/3km_pbl7/2007010412.object_count.nc track_data_VSE_json_MAX_UPDRAFT_HELICITY_25/20070107/3km_pbl7/2007010712.object_count.nc track_data_VSE_json_MAX_UPDRAFT_HELICITY_25/20070212/3km_pbl7/2007021212.object_count.nc track_data_VSE_json_MAX_UPDRAFT_HELICITY_25/20070213/3km_pbl7/2007021312.object_count.nc track_data_VSE_json_MAX_UPDRAFT_HELICITY_25/20070224/3km_pbl7/2007022412.object_count.nc track_data_VSE_json_MAX_UPDRAFT_HELICITY_25/20080212/3km_pbl7/2008021212.object_count.nc track_data_VSE_json_MAX_UPDRAFT_HELICITY_25/20080216/3km_pbl7/2008021612.object_count.nc track_data_VSE_json_MAX_UPDRAFT_HELICITY_25/20080217/3km_pbl7/2008021712.object_count.nc track_data_VSE_json_MAX_UPDRAFT_HELICITY_25/20080225/3km_pbl7/2008022512.object_count.nc track_data_VSE_json_MAX_UPDRAFT_HELICITY_25/20081209/3km_pbl7/2008120912.object_count.nc track_data_VSE_json_MAX_UPDRAFT_HELICITY_25/20090218/3km_pbl7/2009021812.object_count.nc track_data_VSE_json_MAX_UPDRAFT_HELICITY_25/20091224/3km_pbl7/2009122412.object_count.nc track_data_VSE_json_MAX_UPDRAFT_HELICITY_25/20100120/3km_pbl7/2010012012.object_count.nc track_data_VSE_json_MAX_UPDRAFT_HELICITY_25/20101231/3km_pbl7/2010123112.object_count.nc track_data_VSE_json_MAX_UPDRAFT_HELICITY_25/20110228/3km_pbl7/2011022812.object_count.nc track_data_VSE_json_MAX_UPDRAFT_HELICITY_25/20111222/3km_pbl7/2011122212.object_count.nc track_data_VSE_json_MAX_UPDRAFT_HELICITY_25/20120122/3km_pbl7/2012012212.object_count.nc track_data_VSE_json_MAX_UPDRAFT_HELICITY_25/20120125/3km_pbl7/2012012512.object_count.nc track_data_VSE_json_MAX_UPDRAFT_HELICITY_87/20070107/1km_on_3km_pbl7/2007010712.object_count.nc track_data_VSE_json_MAX_UPDRAFT_HELICITY_87/20090218/1km_on_3km_pbl7/2009021812.object_count.nc t.nc" ;
    # Created by ncea -y ttl track_data_VSE_json_UP_HELI_MAX03_??/20*/*3km_pbl1/20*12.object_count.nc t.nc
    # in /glade/scratch/ahijevyc/OBJECT_TRACK

    dates = [x for x in history.split(' ') if '.object_count.nc' in x]
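    # each entry looks like
    # 'track_data_VSE_json_MAX_UPDRAFT_HELICITY_25/20050113/3km_pbl7/2005011312.object_count.nc'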
    vmax = 2.
else:
    # start with "track_data_" part of file path
    dates = [obj_count_file[obj_count_file.index('track_data_'):]]
    vmax = 20.

print dates
ntimes = len(dates) * 24
print "got",ntimes,"times"

# e.g. 'track_data_VSE_json_MAX_UPDRAFT_HELICITY_25/20050113/3km_pbl7/...' has
# field 'MAX_UPDRAFT_HELICITY_25' and member '3km_pbl7'
fields = [x.split('/')[0].split('json_')[-1] for x in dates]
members = [x.split('/')[2] for x in dates]
field_members = [x + ' ' + y for x, y in zip(fields, members)]

field_members = set(field_members) # set ensures unique field/member pairs
fields = set(fields) # set ensures unique fields
print "fields=",fields
members = set(members) # set ensures unique members
print "members=",members
print "got", field_members

if 'pixel_thresh' in ncf.ncattrs():
    pixel_thresh = ncf.getncattr('pixel_thresh')
    print "got pixel_thresh=",pixel_thresh,"from netcdf file"
else:
    pixel_thresh = 1
    print "assuming pixel_thresh=1"
ncf.close()
    
    
    
print "max object count:", np.max(nobj)
    
# Sanity check. Make sure all model_grids are the same.
run_date = dt.datetime(2011,2,28,12) # arbitrary date
model_grids = []
for field_member in field_members:
    # recover the field/member pairing; zipping the separate sets would pair them arbitrarily
    field, member = field_member.split()
    model_grids.append(ModelOutput('VSE', member, run_date, field, run_date,
                                    run_date+timedelta(hours=24),"/glade/scratch/ahijevyc/VSE/",
                                    single_step=True))
for model_grid in model_grids:
    if model_grid.proj != model_grids[0].proj:
        print model_grids[0], model_grid
        print model_grids[0].proj, model_grid.proj
        sys.exit(1)
    
model_map_file="/glade/p/work/ahijevyc/hagelslag/mapfiles/VSE.txt"
model_grid.load_map_info(model_map_file)


fig, ax = plt.subplots(figsize=(8.5,5))
m = add_basemap(ax)
# convert number of objects to percentage of times; mask pixels with no objects
image = np.ma.array(nobj/float(ntimes)*100., mask=nobj<1)
print "max percentage:", np.max(image)
# sample the colormaps that you want to use. Use 128 from each so we get 256
# colors in total
colors1 = plt.cm.Blues(np.linspace(0., 1, 128))
colors2 = plt.cm.Greens_r(np.linspace(0, 1, 128))

# combine them and build a new colormap
colors = np.vstack((colors1, colors2))
import matplotlib.colors as mcolors
mymap = mcolors.LinearSegmentedColormap.from_list('my_colormap', colors)
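# mymap runs light-to-dark blues over the lower half of the range, then
# dark-to-light greens over the upper half.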

cax = m.contourf(model_grid.lon,model_grid.lat, image, cmap=mymap,latlon=True,vmin=0., vmax=vmax)
ax.set_title('\n'.join(field_members) + "\nobjects " + r'$\geq$' + " %d pixels"%pixel_thresh + " %d times"%ntimes)
cbar = fig.colorbar(cax,label="%")

mysavfig(obj_count_file + ".png")

In [ ]:
ncf = Dataset(basedir + "u.nc")
print ncf.variables
nobj = ncf.variables["count"][:]
print nobj.shape
ncf.close()

In [ ]:
# list available colormaps and other attributes of plt.cm
dir(plt.cm)
