These are some more advanced worked examples demonstrating Iris features, written mainly for members of the Dept. of Geosciences. Each one should be stand-alone, i.e. it should not depend on anything done in the other examples.
In [1]:
# modules we'll need...
import iris
import iris.analysis.cartography   # <-- for grid-cell weighting
import iris.coord_categorisation   # <-- for creating new season coords
import iris.quickplot              # <-- wrapper for easy plotting of Iris cubes
import matplotlib.dates            # <-- tools for generating lists of dates easily
import matplotlib.pyplot           # <-- general plotting tools

# load..
sat = iris.load_cube('~/data/HadCRUT*.nc',
                     'near_surface_temperature_anomaly')

# Average in space..
def area_avg(cube):
    """
    Simple function to calculate the area-average.
    Makes lots of assumptions!
    """
    # --> add bounds
    cube.coord('latitude').guess_bounds()
    cube.coord('longitude').guess_bounds()
    # --> calculate weight of each cell
    cell_size = iris.analysis.cartography.area_weights(cube)
    # --> perform average
    return cube.collapsed(['latitude', 'longitude'],
                          iris.analysis.MEAN, weights=cell_size)

sat_ts = area_avg(sat)

# Calculate the winter average..
# --> add 'season' coordinates
iris.coord_categorisation.add_season(sat_ts, 'time')
iris.coord_categorisation.add_season_year(sat_ts, 'time')
# --> group data by season and season-year, and average
sat_ts_seasons = sat_ts.aggregated_by(['season', 'season_year'],
                                      iris.analysis.MEAN)
# --> get just the winters
sat_ts_djf = sat_ts_seasons.extract(iris.Constraint(season='djf'))

# Calculate a decadally-smoothed form of the data.
# At the moment rolling_window has a bug, so we need to remove
# any dependent string coordinate (e.g. season) first.
# --> remove string coordinate
sat_ts_djf.remove_coord('season')
# --> apply a rolling-window average, in chunks of 10 winters
smooth = sat_ts_djf.rolling_window('time', iris.analysis.MEAN, 10)

# Make our plot..
# --> make a larger canvas
matplotlib.pyplot.figure(figsize=(8, 8))
# --> plot the raw data in grey
iris.quickplot.plot(sat_ts_djf, 'grey')
# --> add a horizontal dashed line at zero
matplotlib.pyplot.axhline(0, linestyle='--', color='grey')
# --> add our smoothed data, in red, and thicker
iris.quickplot.plot(smooth, 'r', linewidth=2.0)

# We don't really like the position of those x-axis ticks, so let's
# change them.
# --> get a handle to the x-axis..
xaxis = matplotlib.pyplot.gca().xaxis
# --> use matplotlib's locator to place date ticks every 20 years
xaxis.set_major_locator(matplotlib.dates.YearLocator(20))

# Display..
matplotlib.pyplot.show()
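One wrinkle with area_avg as written: guess_bounds() raises an error if a coordinate already has bounds, so calling the function twice on the same cube will fail. Here is a more defensive sketch using Iris's has_bounds() check (the name area_avg_safe is just illustrative, not part of the example above):

def area_avg_safe(cube):
    """
    Area-average, only guessing bounds where they are missing,
    so it is safe to call more than once on the same cube.
    """
    for name in ('latitude', 'longitude'):
        if not cube.coord(name).has_bounds():
            cube.coord(name).guess_bounds()
    cell_size = iris.analysis.cartography.area_weights(cube)
    return cube.collapsed(['latitude', 'longitude'],
                          iris.analysis.MEAN, weights=cell_size)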
Here we build a custom date categorisation: a 'pentad' coordinate numbering the 5-day chunks of the year, which we then use to compare the mean annual cycle at the start of the record (1850--1880) against the end (1980--2010).
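Just to sanity-check the arithmetic before wiring it into Iris (this little snippet is an illustration, not part of the cell below): floor(day / 5) maps days 1-4 to pentad 0, days 5-9 to pentad 1, and so on up to pentad 73:

from math import floor
print [int(floor(day / 5)) for day in (1, 4, 5, 9, 364, 365)]
# -> [0, 0, 1, 1, 72, 73]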
In [10]:
# modules we'll need
import iris
import iris.coord_categorisation
import iris.quickplot
import matplotlib.pyplot
from math import floor

# load data..
sat = iris.load_cube('~/data/HadCRUT*.nc',
                     'near_surface_temperature_anomaly')

# get a timeseries by averaging in space..
# (for ease I've just re-used the area_avg function from the
#  first example, so run that cell first)
sat_ts = area_avg(sat)

# Add a 'pentad' coordinate.
# --> start by adding the day-of-year..
iris.coord_categorisation.add_day_of_year(sat_ts, 'time')

# --> define a helpful translation function..
def pentad_of_year(coord, point):
    return floor(point / 5)

# --> use that function to create a new 'pentad' coordinate.
iris.coord_categorisation.add_categorised_coord(
    sat_ts, 'pentad', sat_ts.coord('day_of_year'),
    pentad_of_year, units='1')

# show that worked..
print "Day of year:", sat_ts.coord('day_of_year').points
print "Pentad nos:", sat_ts.coord('pentad').points
print '--' * 50

# Calculate a climatology for 1850--1880, and 1980--2010..
# --> add a 'year' label, to make it easier to select data
# (half-open intervals: include the start year, exclude the end year)
iris.coord_categorisation.add_year(sat_ts, 'time')
sat_ts_start = sat_ts.extract(iris.Constraint(year=lambda yr: 1850 <= yr < 1880))
sat_ts_end = sat_ts.extract(iris.Constraint(year=lambda yr: 1980 <= yr < 2010))
# --> aggregate for each...
sat_ts_start = sat_ts_start.aggregated_by('pentad', iris.analysis.MEAN)
sat_ts_end = sat_ts_end.aggregated_by('pentad', iris.analysis.MEAN)

# Plot!
# --> (optionally, this could go in the first slot of a 1 row x 2 column grid)
# matplotlib.pyplot.subplot2grid((1, 2), (0, 0))
# --> plot the climatologies..
iris.quickplot.plot(sat_ts_start, 'b', label='1850--1880')
iris.quickplot.plot(sat_ts_end, 'r', label='1980--2010')
# Display..
matplotlib.pyplot.show()
Note that the x-axis is wrong: it hasn't plotted against the 'pentad' coordinate, instead each value has been plotted against its position in the list. Here's how we specify what to plot against...
In [11]:
iris.quickplot.plot(sat_ts_start.coord('pentad'), sat_ts_start,
                    'b', label='1850--1880')
iris.quickplot.plot(sat_ts_end.coord('pentad'), sat_ts_end,
                    'r', label='1980--2010')
matplotlib.pyplot.legend(loc='center right')
matplotlib.pyplot.show()
Well that's weird. There must be something up with the coord dimension...
In [4]:
# look at the aggregated cube..
print sat_ts_start
print '--' * 50
# ..and at the day-of-year points it now carries
print sat_ts_start.coord('day_of_year').points
Hmm, what's going on with the underlying data? Let's re-extract..
In [7]:
tmp = sat_ts.extract(iris.Constraint(year=lambda yr: 1850 < yr < 1855))
# print the pentad numbers one year (12 monthly points) at a time..
for i in range(4):
    print tmp.coord('pentad').points[i*12:(i+1)*12]
print '--' * 50
import numpy
# ..and check how many distinct pentad numbers there are
print numpy.unique(tmp.coord('pentad').points)
print len(numpy.unique(tmp.coord('pentad').points))
That seems largely fine. There are a few years where the pentad numbers shift slightly (leap years move the day-of-year of each month). Maybe we should try removing the other coordinates before aggregating?
In [9]:
# remove all the other time-related coordinates..
tmp2 = tmp.copy()
tmp2.remove_coord('time')
tmp2.remove_coord('day_of_year')
tmp2.remove_coord('year')
# check it worked..
print tmp2
# do the aggregation..
print tmp2.aggregated_by('pentad', iris.analysis.MEAN).coord('pentad').points
In [8]:
# the same aggregation on the full timeseries, for comparison..
print sat_ts.aggregated_by('pentad', iris.analysis.MEAN).coord('pentad')
Examples to build next...
ASIDE: This is how to get 'safe' division (i.e. division that always returns a float), which is the standard behaviour in Python 3 and is available in Python 2 now..
In [2]:
from __future__ import division
2/3
Out[2]:
0.6666666666666666
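Note that even with true division enabled, the floor-division operator // still gives the old integer behaviour, so you can ask for it explicitly (a quick illustrative snippet, not from the original cell):

from __future__ import division
print 2 / 3    # true division  -> 0.666666666667
print 2 // 3   # floor division -> 0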