In [1]:
import numpy as np
import matplotlib.pyplot as plt
import scipy.ndimage
from scipy.optimize import curve_fit
# Create ice data arrays: per-year ice extent for July, August, September.
year, ice_july, ice_aug, ice_sept = np.loadtxt("icedata.txt", dtype=float, unpack=True)
# Mean of the three monthly readings for each year.
# BUG FIX: the original wrote `july + aug + sept / 3.0`, which (by operator
# precedence) divided only September by 3. Parenthesize and vectorize.
ice_average = (ice_july + ice_aug + ice_sept) / 3.0
def box_car_smooth(data):
    """
    Box-car (trailing moving-average) smoothing with a window of width 5.

    The window trails each point: entry i is the mean of data[i-4..i].
    Smoothing starts from the newest sample and walks backwards so the
    most recent data is always covered by a full window; the first
    `width` entries (which cannot get a full window) are back-filled
    with the earliest computed average.

    data: 1-D sequence of values.
    Returns: numpy array of smoothed values, same length as `data`.
    """
    width = 5
    smoothed = np.zeros(len(data))
    # Newest-to-oldest: only positions with a complete trailing window.
    for end in range(len(data) - 1, width - 1, -1):
        window_sum = 0.0
        for offset in range(width):
            window_sum += data[end - offset]
        smoothed[end] = window_sum / width
    # Back-fill the untouched (still-zero) leading entries with the
    # first full-window average.
    for idx in range(len(smoothed)):
        if smoothed[idx] == 0:
            smoothed[idx] = smoothed[width]
    return smoothed
def gaussian_smooth(data):
    """
    Smooth `data` with a 1-D Gaussian kernel (sigma = 7 samples).

    Uses scipy.ndimage.gaussian_filter1d with mode='nearest', so the
    series is padded at both ends by repeating the edge values.

    FIX: the `scipy.ndimage.filters` namespace is deprecated and removed
    in recent SciPy releases; call gaussian_filter1d from scipy.ndimage
    directly (same function, same arguments).

    data: 1-D array-like of values to smooth.
    Returns: numpy array of smoothed values, same length as `data`.
    """
    sigma = 7  # standard deviation of the Gaussian kernel, in samples
    return scipy.ndimage.gaussian_filter1d(data, sigma, mode='nearest')
def exponential_smooth(data, A):
    """
    Exponentially-weighted smoothing of a 1-D series.

    Each output blends the current observation with the previous
    smoothed value: s[i] = A*data[i] + (1-A)*s[i-1], seeded with
    s[0] = data[0].

    data: 1-D sequence of values.
    A: smoothing factor between 0 and 1 (larger A tracks the raw data
       more closely).
    Returns: numpy array of smoothed values, same length as `data`.
    """
    smoothed = np.zeros(len(data))
    if len(data) > 0:
        smoothed[0] = data[0]  # seed with the first observation
    for idx in range(1, len(data)):
        smoothed[idx] = A * data[idx] + (1 - A) * smoothed[idx - 1]
    return smoothed
def poly_fit(years, data):
    """
    Fit a degree-3 polynomial baseline to `data` as a function of `years`.

    To anchor the fit at both ends, the series is padded with 5 copies
    of the first value (at years first-5..first-1) and 5 copies of the
    last value (at years last+1..last+5) before fitting; the padding is
    stripped from the returned curve.

    years: 1-D array of x-values (years).
    data:  1-D array of y-values, same length as `years`.
    Returns: numpy array of the fitted polynomial evaluated at `years`
             (padding removed), same length as `data`.
    """
    pad = 5
    # Pad the data with constant end values to anchor the fit.
    data_long = np.concatenate(([data[0]] * pad, data, [data[-1]] * pad))
    # Extend the year axis to match: 5 years before and 5 after.
    year_long = np.concatenate((years[0] - np.arange(pad, 0, -1),
                                years,
                                years[-1] + np.arange(1, pad + 1)))
    # polyfit(..., full=True) returns (coefficients, residuals, ...);
    # coefficients are ordered highest degree first.
    x3, x2, x1, intercept = np.polyfit(year_long, data_long, 3, full=True)[0]
    poly_data = np.zeros(len(data_long))
    for idx in range(len(data_long)):
        z = year_long[idx]
        poly_data[idx] = x3 * (z ** 3) + x2 * (z ** 2) + x1 * (z) + intercept
    # Drop the anchoring points so the result aligns with `years`.
    return poly_data[pad:-pad]
def func(x, a, b, c, d):
    """Shifted exponential model a*exp(-c*(x-b)) + d, used by curve_fit."""
    shifted = x - b
    return a * np.exp(-c * shifted) + d
# Figure 1: the raw yearly average plus the three smoothing techniques,
# stacked in four rows for side-by-side comparison.
plt.figure(1)
plt.subplot(4,1,1)
plt.title("Ice Average")
plt.grid(True)
plot_avg = plt.plot(year, ice_average, 'r')
plt.subplot(4,1,2)
plt.title("Box Car smoothing")
plt.grid(True)
plot2 = plt.plot(year, box_car_smooth(ice_average), 'r')
plt.subplot(4,1,3)
# FIX: corrected typo in the displayed title ("kernal" -> "kernel").
plt.title("Gaussian kernel smoothing")
plt.grid(True)
plot3 = plt.plot(year, gaussian_smooth(ice_average), 'r')
plt.subplot(4,1,4)
plt.title("Exponential smoothing")
plt.grid(True)
plot4 = plt.plot(year, exponential_smooth(ice_average, 0.3), 'r')
# Figure 2: distribution of the September/July ice-extent ratio.
plt.figure(2)
plt.title("Ratio of ice of September compared to July")
plt.xlabel("September / July ice (sq km)")
plt.ylabel("Frequency")
plt.grid(True)
# Element-wise ratio per year; vectorized division replaces the
# original index loop (same values, same float64 dtype).
ratio_july_sept = ice_sept / ice_july
hist_ratio = plt.hist(ratio_july_sept, bins=40)
# FIX: Python 2 print statement replaced with the print() function.
print("Ratio of sea ice change in last 15 years:", np.mean(ratio_july_sept[-15:]))
# Figure 3: fit a degree-3 baseline to the pre-1975 data and show the
# ice level relative to that baseline.
plt.figure(3)
# FIX: plt.title() before plt.subplot() attached the title to a
# throw-away axes that the subplots then covered; use a figure-level
# suptitle instead.
plt.suptitle("Polynomial fit test")
ice_pre1975 = ice_average[:105]   # first 105 records (pre-1975)
year_pre1975 = year[:105]
# Compute the baseline once and reuse it in both subplots (the original
# called poly_fit twice with identical arguments).
baseline = poly_fit(year_pre1975, ice_pre1975)
plt.subplot(2,1,1)
plt.xlabel("Year")
plt.ylabel("Ice (km^2)")
plt.grid(True)
plot_poly = plt.plot(year_pre1975, ice_pre1975, 'r',
                     year_pre1975, baseline, 'b')
# FIX: legend must be created after the lines are drawn, otherwise the
# labels have no artists to attach to.
plt.legend(["Ice data no smoothing", "Baseline polynomial deg=3"])
plt.subplot(2,1,2)
plt.xlabel("Year")
plt.ylabel("Ice level above baseline")
#plt.figure(3).gca().set_xticks(year_pre1975)
plt.grid(True)
sub_baseline = ice_pre1975 - baseline
plot_sub = plt.plot(year_pre1975, sub_baseline, 'r', year_pre1975, np.zeros(len(year_pre1975)), 'b')
# FIX: legend moved after the plot calls (see note above on the first subplot).
plt.legend(["Ice Data", "Baseline polynomial"])
# Find the area of each cold feature (ice relative to baseline) using
# the composite trapezoidal rule with unit (1-year) spacing.
area_cold_feature1 = np.trapz(sub_baseline[71:87], dx=1)
area_cold_feature2 = np.trapz(sub_baseline[98:-1], dx=1)
# FIX: Python 2 print statements replaced with the print() function.
print("Area of first feature 1941-1956: %.3f" % area_cold_feature1)
print("Area of second feature 1968-1973: %.3f" % area_cold_feature2)
print("\n")
# Figure 4: September ice since 1979, smoothed, with an exponential
# decay curve fitted to extrapolate when the ice disappears.
plt.figure(4)
plt.title("The last sea ice, A=smoothing constant")
plt.xlabel("Years since 1979")
plt.ylabel("Ice (km^2)")
plt.grid(True)
x = np.arange(0,37)
ice_data_1979 = ice_sept[109:]  # presumably index 109 is 1979 -- TODO confirm against icedata.txt
ice_data_1979 = exponential_smooth(ice_data_1979, 0.5) #A exponential smoothing
#ice_data_1979 = gaussian_smooth(ice_data_1979) #B gaussian kernel
# Fit a*exp(-c*(x-b)) + d with hand-tuned initial guesses.
popt, pcov = curve_fit(func, x, ice_data_1979, [-350, 100, -0.075, 7.5 ]) #A exponential smoothing
#popt, pcov = curve_fit(func, x, ice_data_1979, [-130, 140, -0.04, 8 ]) #B gaussian kernel
plot4 = plt.plot(x, ice_data_1979, 'r')
plot4 = plt.plot(x, func(x,*popt), 'b')
# FIX: legend created after the lines are drawn so the labels attach to
# real artists (the original called legend before any plot).
plt.legend(["Ice data A=0.5", "Exponential trend line"])
#Last ice:
#x-intercept of my exponential curve fit given by y=0,
# a * e^(-c(x-b)) + d = 0
#Solve for x,
# -1/c * ln(-d/a) + b = x
last_ice = -1/popt[2] * np.log(-popt[3]/popt[0]) + popt[1]
# FIX: Python 2 print statements replaced with the print() function.
print("We will run out of ice in:")
print("%.2f" % (1979 + last_ice))
print("if we do not stop our destructive ways or limit further melting.")
plt.show()
In [ ]:
In [ ]:
In [ ]: