In [121]:
import glob
import os
from netCDF4 import Dataset
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
import matplotlib.patches as patches
import matplotlib.patheffects as PathEffects
import cartopy.crs as ccrs
import cartopy
import numpy as np
import scipy.misc as misc
from pyhdf.SD import SD, SDC
import h5py
import pyresample as pr
import pyproj
import pandas as pd
from sklearn.neighbors import KDTree, BallTree
import scipy
from shapely.geometry import Point
import seaborn as sns
from datetime import datetime
from dateutil.parser import parse
from skimage import exposure
import scipy.interpolate as interpolate
import matplotlib.ticker as mticker
from cartopy.mpl.gridliner import LONGITUDE_FORMATTER, LATITUDE_FORMATTER
In [3]:
cd ..
In [111]:
import src.config.filepaths as fp
import src.features.fre_to_tpm.viirs.ftt_utils as ut
import src.features.fre_to_tpm.viirs.ftt_fre as ft
import src.data.readers.load_hrit as load_hrit
In [5]:
outpath = "/Users/danielfisher/Dropbox/working_documents/papers/oda-extremefireemissions/figures"
In [646]:
geom_geo_list, data_utm_list, geom_utm_list, time_stamps = setup_plume_data()
In [637]:
peat_data = create_peat_map_dict()
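In [ ]:
# Note on the structure returned by create_peat_map_dict() (defined at the end of this
# notebook): entries are keyed by the peat-map filename stem and, with the read_nc version
# in effect here, hold flattened arrays, e.g.
# peat_data['peatSumatra'] -> {'is_peat': 1-D mask, 'lats': 1-D, 'lons': 1-D}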
In [668]:
peat_data['peatSumatra']['lons'][0]
Out[668]:
In [743]:
plt.close('all')
fig = plt.figure(figsize=(15,19))
gs = gridspec.GridSpec(4, 4)
gs.update(wspace=0.21, hspace=0.11)
ax_list = []
min_lat_all = 90
max_lat_all = -90
min_lon_all = 180
max_lon_all = -180
plume_mean_lats = []
plume_mean_lons = []
plume_id= []
for i in xrange(13):
ax_list.append(plt.subplot(gs[i], projection=ccrs.PlateCarree()))
for i, ds in enumerate(data_utm_list):
# sort out aod
aod_combined = extract_combined_aod_full_plume(ds, geom_geo_list[i]['plume_mask'])
#aod_combined = np.ma.masked_array(aod_combined, geom_geo_list[i]['plume_mask'] == 0)
aod_combined = interp_aod(aod_combined, geom_geo_list[i]['plume_mask'])
aod_combined = np.ma.masked_array(aod_combined, geom_geo_list[i]['plume_mask'] == 0)
if i in [0,1,2,3,4]:
af = 0.02
else:
af = 0.001
lats = geom_geo_list[i]['plume_lats']
lons = geom_geo_list[i]['plume_lons']
plume_mean_lats.append(np.mean(lats))
plume_mean_lons.append(np.mean(lons))
plume_id.append(i)
min_lon = np.min(lons)
max_lon = np.max(lons)
min_lat = np.min(lats)
max_lat = np.max(lats)
img_extent = (min_lon, max_lon, min_lat, max_lat)
    # check to see if any coords exceed the current overall extents
if min_lon < min_lon_all:
min_lon_all = min_lon
if max_lon > max_lon_all:
max_lon_all = max_lon
if min_lat < min_lat_all:
min_lat_all = min_lat
if max_lat > max_lat_all:
max_lat_all = max_lat
r = misc.imresize(ds['m5_plume'] * (251.0/ds['m5_plume'].max()), (256,256,3)).astype('int')
r = exposure.equalize_adapthist(r, clip_limit=af)
g = misc.imresize(ds['m4_plume'] * (251.0/ds['m4_plume'].max()), (256,256,3)).astype('int')
g = exposure.equalize_adapthist(g, clip_limit=af)
b = misc.imresize(ds['m3_plume'] * (251.0/ds['m3_plume'].max()), (256,256,3)).astype('int')
b = exposure.equalize_adapthist(b, clip_limit=af)
rgb = np.dstack((r,g,b))
ax_list[i].imshow(rgb, origin='upper', extent=img_extent, transform=ccrs.PlateCarree())
pc = ax_list[i].imshow(aod_combined, origin='upper', extent=img_extent, transform=ccrs.PlateCarree(),
alpha=0.7, vmin=0, vmax=5, cmap='rainbow')
gl = ax_list[i].gridlines(crs=ccrs.PlateCarree(), draw_labels=True,
linewidth=2, color='gray', alpha=0.5, linestyle='--')
gl.xlabels_top = False
gl.ylabels_right = False
gl.xlocator = mticker.FixedLocator(np.arange(100, 130, 0.5))
gl.ylocator = mticker.FixedLocator(np.arange(-5, 5, 0.5)*-1)
gl.xformatter = LONGITUDE_FORMATTER
gl.yformatter = LATITUDE_FORMATTER
ax_list[i].text(0.85, 0.08, str(i+1), transform=ax_list[i].transAxes, fontsize=20, color='white')
ax_list[i].set_aspect('auto')
# add colorbar
cb_ax = fig.add_axes([0.92, 0.32, 0.02, 0.5])
cbar = fig.colorbar(pc, cax=cb_ax)
cbar.set_label('AOD', fontsize=20)
cbar.solids.set_edgecolor("face")
plume_mean_lats = np.array(plume_mean_lats)
plume_mean_lons = np.array(plume_mean_lons)
plume_id = np.array(plume_id)
x_shift = [0.04,0.04,0.04,0.04,0.04,0.02,0,-0.15,0.04,0.04,-0.22,-0.22,-0.22]
y_shift = [-0.04,0,0,0,-0.05,0.02,0.03,0,0,0,0,-0.05,0]
# draw sumatra map
sumatra_extent = (104.5, 106.5, -3.5, -2.25)
ax_map_sumatra = plt.subplot(gs[-1, 1], projection=ccrs.PlateCarree())
ax_map_sumatra.set_extent(sumatra_extent)
#ax_map_sumatra.set_aspect('auto')
# plot the peat data
mask = ((peat_data['peatSumatra']['lons'] > sumatra_extent[0]) &
(peat_data['peatSumatra']['lons'] < sumatra_extent[1]) &
(peat_data['peatSumatra']['lats'] > sumatra_extent[2]) &
(peat_data['peatSumatra']['lats'] < sumatra_extent[3]) &
(peat_data['peatSumatra']['is_peat'] == 1)
)
ax_map_sumatra.plot(peat_data['peatSumatra']['lons'][mask],
peat_data['peatSumatra']['lats'][mask],
's', color='chocolate', transform=ccrs.PlateCarree(), markersize=0.25, alpha=1)
ax_map_sumatra.add_feature(cartopy.feature.GSHHSFeature(scale='full', levels=[1, 2, 3, 4]))
sumatra_mask = plume_mean_lons < 107
for lat, lon, i in zip(plume_mean_lats[sumatra_mask], plume_mean_lons[sumatra_mask], plume_id[sumatra_mask]):
ax_map_sumatra.plot(lon, lat, '.', markeredgecolor='w', markerfacecolor='k', markersize=16)
txt = ax_map_sumatra.text(lon+x_shift[i], lat+y_shift[i], str(i+1), fontsize=14)
txt.set_path_effects([PathEffects.withStroke(linewidth=2, foreground='w')])
ax_map_sumatra.plot(104.778, -2.968, '*', markeredgecolor='w', markerfacecolor='r', markersize=14)
txt = ax_map_sumatra.text(104.5, -3.05, "Palembang", fontsize=12)
txt.set_path_effects([PathEffects.withStroke(linewidth=2, foreground='w')])
gl = ax_map_sumatra.gridlines(crs=ccrs.PlateCarree(), draw_labels=True,
linewidth=2, color='gray', alpha=0.5, linestyle='--')
gl.xlabels_top = False
gl.ylabels_right = False
gl.xlocator = mticker.FixedLocator(np.arange(102, 132, 0.5))
gl.ylocator = mticker.FixedLocator(np.arange(-5, 7, 0.5)*-1)
gl.xformatter = LONGITUDE_FORMATTER
gl.yformatter = LATITUDE_FORMATTER
ax_map_sumatra.set_aspect('auto')
# draw kalimantan map
extent_kalimantan = (108, 115, -4, 0.25)
ax_map_kali = plt.subplot(gs[-1, 2:], projection=ccrs.PlateCarree())
ax_map_kali.set_extent(extent_kalimantan)
mask = ((peat_data['peatKalimantan']['lons'] > extent_kalimantan[0]) &
(peat_data['peatKalimantan']['lons'] < extent_kalimantan[1]) &
(peat_data['peatKalimantan']['lats'] > extent_kalimantan[2]) &
(peat_data['peatKalimantan']['lats'] < extent_kalimantan[3]) &
(peat_data['peatKalimantan']['is_peat'] == 1)
)
ax_map_kali.plot(peat_data['peatKalimantan']['lons'][mask],
peat_data['peatKalimantan']['lats'][mask],
's', color='chocolate', transform=ccrs.PlateCarree(), markersize=0.05, alpha=1)
ax_map_kali.add_feature(cartopy.feature.GSHHSFeature(scale='full', levels=[1, 2, 3, 4]))
for lat, lon, i in zip(plume_mean_lats[~sumatra_mask], plume_mean_lons[~sumatra_mask], plume_id[~sumatra_mask]):
ax_map_kali.plot(lon, lat, '.', markeredgecolor='w', markerfacecolor='k', markersize=16)
txt = ax_map_kali.text(lon+x_shift[i], lat+y_shift[i], str(i+1), fontsize=16)
txt.set_path_effects([PathEffects.withStroke(linewidth=2, foreground='w')])
ax_map_kali.plot(113.87, -2.242, '*', markeredgecolor='w', markerfacecolor='r', markersize=14)
txt = ax_map_kali.text(113, -2.05, "Palangkaraya", fontsize=12)
txt.set_path_effects([PathEffects.withStroke(linewidth=2, foreground='w')])
gl = ax_map_kali.gridlines(crs=ccrs.PlateCarree(), draw_labels=True,
linewidth=2, color='gray', alpha=0.5, linestyle='--')
gl.xlabels_top = False
gl.ylabels_right = False
gl.xlocator = mticker.FixedLocator(np.arange(102, 132, 1))
gl.ylocator = mticker.FixedLocator(np.arange(-5, 7, 1)*-1)
gl.xformatter = LONGITUDE_FORMATTER
gl.yformatter = LATITUDE_FORMATTER
# plot the peat map in the first axes
#plt.savefig(os.path.join(outpath, "figure_1.png"), dpi=600, bbox_inches='tight')
plt.show()
In [60]:
aeronet_station_data = load_aeronet()
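In [ ]:
# load_aeronet() is not defined in this notebook. A minimal sketch of what it is assumed to
# do: read each AERONET station file into a DataFrame keyed by station name, with an AOD
# value interpolated to 550 nm. The directory, filename pattern and column names below are
# assumptions for illustration, not the actual implementation.
def load_aeronet_sketch(aeronet_dir='/path/to/aeronet'):
    station_data = {}
    for path in glob.glob(os.path.join(aeronet_dir, '*.lev15')):
        station = os.path.basename(path).split('.')[0]
        df = pd.read_csv(path, skiprows=6)  # AERONET text files carry several header lines
        df['datetime'] = pd.to_datetime(df['Date(dd:mm:yyyy)'] + ' ' + df['Time(hh:mm:ss)'],
                                        format='%d:%m:%Y %H:%M:%S')
        # interpolate AOD from 500 nm to 550 nm using the 440-870 nm Angstrom exponent
        alpha = df['440-870Angstrom']
        df['aod550'] = df['AOT_500'] * (550.0 / 500.0) ** (-alpha)
        station_data[station] = df
    return station_data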
In [62]:
prior_comparison_df = pd.read_csv('/Volumes/INTENSO/kcl-fire-aot/ODA/interim/dataframes/aeronet_comp_AMW.csv')
In [71]:
orac_dir = '/Volumes/INTENSO/kcl-fire-aot/ODA/raw/viirs/orac'
filepath_dict = {}
for fname in prior_comparison_df.orac_file.drop_duplicates():
# get station associated with it
stations = list(prior_comparison_df[prior_comparison_df.orac_file == fname]['station'])
# get updated filepath
fname = fname.split('/')[-1].replace("R4591AMW", "R0AR2")
fpath = os.path.join(orac_dir, fname)
filepath_dict[fpath] = stations
In [ ]:
data_dict = dict(x=[], y=[], dist=[], aod550_mean=[], aod550_median=[], aod550_sd=[],
orac_aod_mean=[], orac_cost_mean=[],
orac_aod_median=[], orac_cost_median=[], viirs_aod_mean=[], viirs_flag_mean=[],
viirs_aod_median=[], viirs_flag_median=[], station=[],
n_points_in_aeronet_mean=[], n_orac=[], n_viirs=[])
for filepath, stations in filepath_dict.iteritems():
timestamp = ut.get_timestamp(filepath, 'orac')
dt = datetime.strptime(timestamp, '_%Y%m%d%H%M_')
try:
sat_data = setup_sat_data(timestamp)
sat_data_utm = resample_satellite_datasets(sat_data, fill_value=-999)
    except Exception as e:
        print('failed to set up satellite data: {}'.format(e))
continue
# get lat lons
mask = sat_data_utm['lats'] != -999
resampled_lats_sub = sat_data_utm['lats'][mask]
resampled_lons_sub = sat_data_utm['lons'][mask]
rows = np.arange(sat_data_utm['lats'].shape[0])
cols = np.arange(sat_data_utm['lats'].shape[1])
cols, rows = np.meshgrid(cols, rows)
cols = cols[mask]
rows = rows[mask]
# generate ball tree for satellite data
lat_lon = np.dstack([np.deg2rad(resampled_lats_sub),
np.deg2rad(resampled_lons_sub)])[0]
balltree = BallTree(lat_lon, metric='haversine')
# iterate over each station
for station in stations:
station_df = aeronet_station_data[station]
x, y, dist, n_aeronet, aod550_mean, aod550_median, aod550_sd = collocate_station(station_df,
balltree, cols, rows,
dt)
print 'image lat', sat_data_utm['lats'][y, x]
print 'image lon', sat_data_utm['lons'][y, x]
print()
output_dict = get_aod(sat_data_utm, x, y)
# append to dict
data_dict['x'].append(x)
data_dict['y'].append(y)
data_dict['dist'].append(dist)
data_dict['n_points_in_aeronet_mean'].append(n_aeronet)
data_dict['n_orac'].append(output_dict['n_orac'])
data_dict['n_viirs'].append(output_dict['n_viirs'])
data_dict['aod550_mean'].append(aod550_mean)
data_dict['aod550_median'].append(aod550_median)
data_dict['aod550_sd'].append(aod550_sd)
data_dict['orac_aod_mean'].append(output_dict['mean_orac_aod'])
data_dict['orac_cost_mean'].append(output_dict['mean_orac_cost'])
data_dict['viirs_aod_mean'].append(output_dict['mean_viirs_aod'])
data_dict['viirs_flag_mean'].append(output_dict['mean_viirs_flag'])
data_dict['orac_aod_median'].append(output_dict['median_orac_aod'])
data_dict['orac_cost_median'].append(output_dict['median_orac_cost'])
data_dict['viirs_aod_median'].append(output_dict['median_viirs_aod'])
data_dict['viirs_flag_median'].append(output_dict['median_viirs_flag'])
data_dict['station'].append(station)
In [170]:
aod_comp_df = pd.DataFrame.from_dict(data_dict)
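In [ ]:
# rmse() is called in the next cell but not defined in this notebook; assuming it is the
# standard root-mean-square error, a minimal sketch:
def rmse(reference, estimate):
    return np.sqrt(np.mean((np.asarray(estimate) - np.asarray(reference)) ** 2))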
In [250]:
optical_prop_suffix = 'AMW'
aod_comp_df = pd.read_csv(os.path.join(fp.path_to_dataframes, 'aeronet_comp_' + optical_prop_suffix + '.csv'))
orac_aod_df = aod_comp_df[(aod_comp_df['orac_aod'] != -999)]
viirs_aod_df = aod_comp_df[(aod_comp_df['viirs_aod'] != -999)]
orac_aod_model_df = orac_aod_df[orac_aod_df.aod550 >= 2]
viirs_aod_model_df = viirs_aod_df[viirs_aod_df.aod550 < 2]
orac_invalid_df = orac_aod_df[orac_aod_df.aod550 < 2]
viirs_invalid_df = viirs_aod_df[viirs_aod_df.aod550 >= 2]
fig = plt.figure(figsize=(10,5))
ax0 = plt.subplot(121)
ax1 = plt.subplot(122)
ax0.set_xlim([0,10])
ax1.set_xlim([0,2.5])
ax0.set_ylim([0,10])
ax1.set_ylim([0,2.5])
ax0.plot([-1000, 7500], [-1000, 7500], '--', color='grey')
ax1.plot([-1000, 6000], [-1000, 6000], '--', color='grey')
sns.regplot(orac_aod_model_df.aod550, orac_aod_model_df['orac_aod'], ax=ax0, scatter=True, color='k')
sns.regplot(viirs_aod_model_df.aod550, viirs_aod_model_df['viirs_aod'], ax=ax1, scatter=True, color='k')
sns.scatterplot(orac_invalid_df.aod550, orac_invalid_df['orac_aod'], ax=ax0, color='k', marker='x', alpha=0.5)
sns.scatterplot(viirs_invalid_df.aod550, viirs_invalid_df['viirs_aod'], ax=ax1, color='k', marker='x', alpha=0.5)
# stats
slope0, intercept0, r_value0, _, _ = scipy.stats.linregress(orac_aod_model_df.aod550, orac_aod_model_df['orac_aod'])
slope1, intercept1, r_value1, _, _ = scipy.stats.linregress(viirs_aod_model_df.aod550, viirs_aod_model_df['viirs_aod'])
# adjust orac based on aeronet
print slope0, intercept0
adjusted_orac = (orac_aod_model_df['orac_aod'] - intercept0) / slope0
#sns.regplot(orac_aod_df.aod550, adjusted_orac, ax=ax0, scatter=True, color='r')
orac_rmse = rmse(orac_aod_model_df.aod550, orac_aod_model_df['orac_aod'])
viirs_rmse = rmse(viirs_aod_model_df.aod550, viirs_aod_model_df['viirs_aod'])
orac_mean = np.mean(orac_aod_model_df['orac_aod'] - orac_aod_model_df.aod550)
viirs_mean = np.mean(viirs_aod_model_df['viirs_aod'] -viirs_aod_model_df.aod550)
orac_sd = np.std(orac_aod_model_df.aod550 - orac_aod_model_df['orac_aod'])
viirs_sd = np.std(viirs_aod_model_df.aod550 - viirs_aod_model_df['viirs_aod'])
textstr0 = '$R^2=%.3f$\n$RMSE=%.2f$\nMean($\pm$SD)=$%.2f(\pm%.2f)$\n Samples$=%.0f$' % (
r_value0 ** 2, orac_rmse, orac_mean, orac_sd, orac_aod_model_df.aod550.shape[0])
textstr1 = '$R^2=%.3f$\n$RMSE=%.2f$\nMean($\pm$SD)=$%.2f(\pm%.2f)$\n Samples$=%.0f$' % (
r_value1 ** 2, viirs_rmse, viirs_mean, viirs_sd, viirs_aod_model_df.aod550.shape[0])
props = dict(boxstyle='round', facecolor='white', alpha=0.5)
ax0.text(0.45, 0.25, textstr0, transform=ax0.transAxes, fontsize=10,
verticalalignment='top', bbox=props)
ax1.text(0.45, 0.25, textstr1, transform=ax1.transAxes, fontsize=10,
verticalalignment='top', bbox=props)
ax0.set_xlabel('Aeronet 550 nm AOD', fontsize=14)
ax1.set_xlabel('Aeronet 550 nm AOD', fontsize=14)
ax0.set_ylabel('ORAC 550 nm AOD', fontsize=14)
ax1.set_ylabel('VIIRS SP 550 nm AOD', fontsize=14)
# plt.savefig(os.path.join(fp.figure_dir, 'aeronet_comparison_' + optical_prop_suffix + '.png'),
# dpi=600, bbox_inches='tight')
plt.show()
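In [ ]:
# The slope and intercept fitted above for ORAC against AERONET appear to be the values
# later hard-coded in proc_params() as orac_slope and orac_intercept; inverting the fit
# maps ORAC AOD back onto the AERONET scale, as already done for adjusted_orac above:
# bias_adjusted_aod = (orac_aod - intercept0) / slope0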
In [218]:
path = '/Users/danielfisher/Dropbox/combined_samples_bias_adjusted.csv'
df = pd.read_csv(path)
In [219]:
df.drop([1, 5, 6, 7, 17], inplace=True)
In [220]:
# adjust the FRE of plume 0 and plume 13 as the derived plume flows are not correct; adjustment made by eye
df.loc[0, 'fre'] *= 0.5
df.loc[13, 'fre'] *= 0.66
In [225]:
df.head(20)
Out[225]:
In [247]:
plt.close('all')
fig = plt.figure(figsize=(8,8))
ax0 = plt.subplot(111)
# stats
slope0, intercept0, r_value0, _, _ = scipy.stats.linregress(df.fre, df.tpm)
x = df.fre.values
y = df.tpm
x = x[:, np.newaxis]
a, _, _, _ = np.linalg.lstsq(x, y)
# plot the fit
plot_x = np.arange(0, x.max()+ 1000000, 1000)
ax0.plot(plot_x, plot_x*a, 'k-', alpha=0.5)
ax0.plot(df.fre.values, df.tpm, 'ko')
for i in xrange(df.shape[0]):
ax0.text(df.fre.iloc[i]+200000, df.tpm.iloc[i], str(i+1))
ax0.set_xlim(0)
ax0.set_ylim(0)
textstr0 = '$R^2=%.3f$\nTPM=$%.3f$ * FRE\nSamples$=%.0f$' % (
r_value0 ** 2, a, df.fre.shape[0])
props = dict(boxstyle='round', facecolor='white', alpha=0.5)
ax0.text(0.05, 0.95, textstr0, transform=ax0.transAxes, fontsize=16,
verticalalignment='top', bbox=props)
ax0.set_xlabel('FRE (MJ)', fontsize=16)
ax0.set_ylabel('TPM (g)', fontsize=16)
# plt.savefig(os.path.join(fp.figure_dir, 'aeronet_comparison_' + optical_prop_suffix + '.png'),
# dpi=600, bbox_inches='tight')
plt.show()
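In [ ]:
# Note on the fit above: scipy.stats.linregress is only used for the R^2, while the plotted
# slope comes from np.linalg.lstsq with a single-column design matrix, which forces the fit
# through the origin (TPM = a * FRE). Printing both slopes makes the difference explicit:
print('through-origin slope: {}, free-intercept slope: {}'.format(a[0], slope0))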
In [760]:
# get different emission coefficients
x = df.fre.values
x = x[:, np.newaxis]
y = df.tpm
# rescale TPM with a peat-specific mass extinction coefficient: https://agupubs.onlinelibrary.wiley.com/doi/full/10.1029/2005GL022678
peat_mec = 7.4 + 0.05
y_peat = df.tpm * 4.3 / peat_mec # m2/g
a, _, _, _ = np.linalg.lstsq(x, y_peat)
print('Peat mec emissions coefficient:', a[0])
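In [ ]:
# The rescaling above divides out the mass extinction coefficient (MEC) apparently assumed
# in the AOD-to-TPM conversion (4.3 m2/g) and applies the peat-specific value
# 7.4 + 0.05 = 7.45 m2/g, i.e. TPM is scaled by 4.3 / 7.45 (about 0.58) before the
# emission coefficient is refitted.
print('TPM scaling factor for the peat MEC: {}'.format(4.3 / peat_mec))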
In [ ]:
AOD c
In [18]:
# get the fire data
frp_df = ut.read_frp_df(fp.path_to_himawari_frp)
In [19]:
start_time = pd.datetime(2015,9,20)
stop_time = pd.datetime(2015,9,24)
frp_df_subset = frp_df.loc[(frp_df['obs_time'] <= stop_time) & (frp_df['obs_time'] >= start_time)]
In [20]:
frp_df_subset = frp_df_subset.copy()
frp_df_subset['obs_date'] = pd.to_datetime(frp_df_subset['obs_date'])
In [22]:
root_path = "/Users/danielfisher/Desktop/viirs_data_oda"
image_data = []
geo_data = []
for f in os.listdir(root_path):
print f
if "GMODO" in f:
image_data.append(h5py.File(os.path.join(root_path, f) , "r"))
elif "GMTCO" in f:
geo_data.append(h5py.File(os.path.join(root_path, f) , "r"))
else:
continue
In [23]:
def create_area_def(lats, lons):
_proj = pyproj.Proj(proj='eqc', ellps='WGS84', datum='WGS84')
mask = (lats < 90) & (lats > -90) & (lons < 180) & (lons > -180)
x, y = _proj(lons[mask], lats[mask])
min_x, max_x = np.min(x), np.max(x)
min_y, max_y = np.min(y), np.max(y)
extent = (min_x, min_y, max_x, max_y)
pixel_size = 750
x_size = int(np.round((extent[2] - extent[0]) / pixel_size))
y_size = int(np.round((extent[3] - extent[1]) / pixel_size))
area_id = 'eqc'
description = 'plate_carree'
proj_id = 'eqc'
proj_dict = {'units': 'm', 'proj': 'eqc', 'ellps': 'WGS84', 'datum': 'WGS84'}
return pr.geometry.AreaDefinition(area_id, description, proj_id, proj_dict,
x_size, y_size, extent)
def create_swath_def(lats, lons):
return pr.geometry.SwathDefinition(lats=lats, lons=lons)
def resample_platecarree(swath_def, area_def, arr):
return pr.kd_tree.resample_nearest(swath_def, arr, area_def, radius_of_influence=3000, fill_value=-999)
image_dicts = []
for ds, geo in zip(image_data, geo_data):
m3 = ds['All_Data']["VIIRS-M3-SDR_All"]["Radiance"][:]
m4 = ds['All_Data']["VIIRS-M4-SDR_All"]["Radiance"][:]
m5 = ds['All_Data']["VIIRS-M5-SDR_All"]["Radiance"][:]
lats = geo['All_Data']["VIIRS-MOD-GEO-TC_All"]["Latitude"][:]
lons = geo['All_Data']["VIIRS-MOD-GEO-TC_All"]["Longitude"][:]
# get the mask for the lats and lons and apply
null_mask = m3 <= -999
masked_lats = np.ma.masked_array(lats, null_mask)
masked_lons = np.ma.masked_array(lons, null_mask)
# create the resamples
area_def = create_area_def(masked_lats, masked_lons)
swath_def = create_swath_def(masked_lats, masked_lons)
d = {}
d['lats'] = resample_platecarree(swath_def, area_def, masked_lats)
d['lons'] = resample_platecarree(swath_def, area_def, masked_lons)
d['m3'] = resample_platecarree(swath_def, area_def, m3)
d['m4'] = resample_platecarree(swath_def, area_def, m4)
d['m5'] = resample_platecarree(swath_def, area_def, m5)
image_dicts.append(d)
In [58]:
plt.close('all')
fig = plt.figure(figsize=(15,12))
gs = gridspec.GridSpec(3, 3)
gs.update(wspace=0.05, hspace=0.01)
extent = (102, 106.5, -5.2, 2)
obs_dates = [pd.datetime(2015,9,22), pd.datetime(2015,9,23), pd.datetime(2015,9,24)]
for i, d in enumerate(image_dicts):
# get bounding box based on target lat/lon
coords = np.where((d['lats'] >= extent[2]) & (d['lats'] <= extent[3]) &
(d['lons'] >= extent[0]) & (d['lons'] <= extent[1]))
min_x = np.min(coords[1])
max_x = np.max(coords[1])
min_y = np.min(coords[0])
max_y = np.max(coords[0])
print min_y, max_y, min_x, max_x
sub_lats = d['lats'][min_y:max_y, min_x:max_x]
sub_lons = d['lons'][min_y:max_y, min_x:max_x]
mask = sub_lats > -999
img_extent = (np.min(sub_lons[mask]), np.max(sub_lons[mask]),
np.min(sub_lats[mask]), np.max(sub_lats[mask]))
print img_extent
r = image_histogram_equalization(d['m5'][min_y:max_y, min_x:max_x])
g = image_histogram_equalization(d['m4'][min_y:max_y, min_x:max_x])
b = image_histogram_equalization(d['m3'][min_y:max_y, min_x:max_x])
r = np.round((r * (255 / np.max(r))) * 1).astype('uint8')
g = np.round((g * (255 / np.max(g))) * 1).astype('uint8')
b = np.round((b * (255 / np.max(b))) * 1).astype('uint8')
# apply masked array
r[~mask] = 255
g[~mask] = 255
b[~mask] = 255
rgb = np.dstack((r, g, b))
ax_map = plt.subplot(gs[0:2, i], projection=ccrs.PlateCarree())
ax_map.set_extent(img_extent)
ax_map.add_feature(cartopy.feature.GSHHSFeature(scale='intermediate', levels=[1]))
ax_map.imshow(rgb, origin='upper',extent=extent, transform=ccrs.PlateCarree())
# plot singapore location
plt.plot(103.8198, 1.3521, 'r*',transform=ccrs.PlateCarree(), markersize=15)
# plot FRE BBOX
rect = patches.Rectangle((103.5, -1.7),0.9,-0.5,linewidth=2, linestyle="--", edgecolor='k',facecolor='none', transform=ccrs.PlateCarree())
ax_map.add_patch(rect)
gl = ax_map.gridlines(crs=ccrs.PlateCarree(), draw_labels=True,
linewidth=2, color='gray', alpha=0.5, linestyle='--')
gl.xlabels_top = False
gl.ylabels_right = False
gl.ylabels_left = False
gl.xlocator = mticker.FixedLocator(np.arange(100, 132.5, 1.25))
gl.xformatter = LONGITUDE_FORMATTER
if i == 0:
gl.ylocator = mticker.FixedLocator(np.arange(-5, 5, 1.25)*-1)
gl.yformatter = LATITUDE_FORMATTER
gl.ylabels_left = True
# add fires to axis as a hexbin
frp_df_temporal_subset = frp_df_subset[frp_df_subset.obs_date == obs_dates[i]]
frp_df_spatial_subset = frp_df_temporal_subset[(frp_df_temporal_subset.LATITUDE > img_extent[2]) &
(frp_df_temporal_subset.LATITUDE < img_extent[3]) &
(frp_df_temporal_subset.LONGITUDE > img_extent[0]) &
(frp_df_temporal_subset.LONGITUDE < img_extent[1])]
frp_df_spatial_subset = frp_df_spatial_subset[frp_df_spatial_subset.FRP_0 > 200]
pc = ax_map.hexbin(frp_df_spatial_subset.LONGITUDE, frp_df_spatial_subset.LATITUDE, frp_df_spatial_subset.FRP_0,
cmap='autumn',alpha=0.8, extent=extent, gridsize=60, edgecolor='k', transform=ccrs.PlateCarree())
# add the histogram plot
roi_bbox = (103.5, 103.5+0.9, -1.7-0.5, -1.7)
roi_spatial_subset = frp_df_temporal_subset[(frp_df_temporal_subset.LATITUDE > roi_bbox[2]) &
(frp_df_temporal_subset.LATITUDE < roi_bbox[3]) &
(frp_df_temporal_subset.LONGITUDE > roi_bbox[0]) &
(frp_df_temporal_subset.LONGITUDE < roi_bbox[1])][['FRP_0']]
ax_hist = plt.subplot(gs[2, i])
binwidth=100
ax_hist.hist(roi_spatial_subset.FRP_0.dropna(),
bins=np.arange(0,
roi_spatial_subset.FRP_0.max() + binwidth, binwidth),
facecolor='w',edgecolor='k')
ax_hist.set_yscale('log', nonposy='clip')
ax_hist.set_ylim([0,1500])
ax_hist.set_xlim([0,1500])
ax_hist.set_xlabel("FRP (MW)", fontsize=18)
if i != 0:
ax_hist.set_yticks([])
else:
ax_hist.set_ylabel("Frequency", fontsize=18)
# add the dates to the plots
ax_map.text(0.58, 0.94, obs_dates[i].strftime("%Y-%m-%d"),
transform=ax_map.transAxes, fontsize=18, color='w',
bbox=dict(boxstyle="round",
ec=(0.5, 0.5, 0.5),
fc=(0.5, 0.5, 0.5),
))
cb_ax = fig.add_axes([0.92, 0.41, 0.02, 0.44])
cbar = fig.colorbar(pc, cax=cb_ax)
cbar.set_label('Daily Mean FRP (MW)', fontsize=20)
cbar.solids.set_edgecolor("face")
plt.show()
In [ ]:
def image_histogram_equalization(image, number_bins=256):
# from http://www.janeriksolem.net/2009/06/histogram-equalization-with-python-and.html
# get image histogram
image_histogram, bins = np.histogram(image[image > 0].flatten(), number_bins, normed=True)
cdf = image_histogram.cumsum() # cumulative distribution function
cdf = 255 * cdf / cdf[-1] # normalize
# use linear interpolation of cdf to find new pixel values
image_equalized = np.interp(image.flatten(), bins[:-1], cdf)
return image_equalized.reshape(image.shape)
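# Quick synthetic check of image_histogram_equalization (illustration only, not part of the
# processing): a low-contrast positive field comes back stretched over roughly 0-255.
_demo_radiance = np.random.gamma(2.0, 10.0, (64, 64))
_demo_equalised = image_histogram_equalization(_demo_radiance)
# _demo_equalised.min() is near 0 and _demo_equalised.max() is near 255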
# plot a scale bar with 4 subdivisions on the left side of the map
def scale_bar_left(ax, bars=1, length=None, location=(0.1, 0.05), linewidth=3, col='black'):
"""
ax is the axes to draw the scalebar on.
bars is the number of subdivisions of the bar (black and white chunks)
length is the length of the scalebar in km.
location is left side of the scalebar in axis coordinates.
(ie. 0 is the left side of the plot)
linewidth is the thickness of the scalebar.
color is the color of the scale bar
"""
# Get the limits of the axis in lat long
llx0, llx1, lly0, lly1 = ax.get_extent(ccrs.PlateCarree())
# Make tmc aligned to the left of the map,
# vertically at scale bar location
sbllx = llx0 + (llx1 - llx0) * location[0]
sblly = lly0 + (lly1 - lly0) * location[1]
tmc = ccrs.TransverseMercator(sbllx, sblly)
# Get the extent of the plotted area in coordinates in metres
x0, x1, y0, y1 = ax.get_extent(tmc)
# Turn the specified scalebar location into coordinates in metres
sbx = x0 + (x1 - x0) * location[0]
sby = y0 + (y1 - y0) * location[1]
# Calculate a scale bar length if none has been given
    # (There's probably a more Pythonic way of rounding the number but this works)
if not length:
length = (x1 - x0) / 5000 # in km
ndim = int(np.floor(np.log10(length))) # number of digits in number
length = round(length, -ndim) # round to 1sf
    # Reduce the length until its leading digit is 1, 2 or 5
def scale_number(x):
if str(x)[0] in ['1', '2', '5']:
return int(x)
else:
return scale_number(x - 10 ** ndim)
length = scale_number(length)
# Generate the x coordinate for the ends of the scalebar
bar_xs = [sbx, sbx + length * 1000 / bars]
# Plot the scalebar chunks
barcol = 'black'
for i in range(0, bars):
# plot the chunk
ax.plot(bar_xs, [sby, sby], transform=tmc, color=barcol, linewidth=linewidth)
# alternate the colour
if barcol == 'white':
barcol = 'dimgrey'
else:
barcol = 'white'
# Generate the x coordinate for the number
bar_xt = sbx + i * length * 1000 / bars
# Plot the scalebar label for that chunk
# ax.text(bar_xt, sby, str(int(i * length / bars)), transform=tmc,
# horizontalalignment='center', verticalalignment='bottom',
# color=col)
# work out the position of the next chunk of the bar
bar_xs[0] = bar_xs[1]
bar_xs[1] = bar_xs[1] + length * 1000 / bars
# Generate the x coordinate for the last number
bar_xt = sbx + length * 1000
# Plot the last scalebar label
ax.text(bar_xt/2., sby, str(int(length))+ ' km', transform=tmc,
horizontalalignment='center', verticalalignment='bottom',
color=col)
# Plot the unit label below the bar
bar_xt = sbx + length * 1000 / 2
bar_yt = y0 + (y1 - y0) * (location[1] / 4)
# ax.text(bar_xt, bar_yt, 'km', transform=tmc, horizontalalignment='center',
# verticalalignment='bottom', color=col)
def read_nc(f):
nc_file = Dataset(f)
# get geo
lon_dims = nc_file['x_range'][:]
lat_dims = nc_file['y_range'][:]
spacing = nc_file['spacing'][:]
lons = np.arange(lon_dims[0], lon_dims[1], spacing[0])
lats = np.flipud(np.arange(lat_dims[0], lat_dims[1], spacing[1]))
lons, lats = np.meshgrid(lons, lats)
# get mask
z = nc_file['z'][:].reshape([3000, 3000])
mask = z > 0
return {"mask": mask, "lats": lats, "lons": lons}
def create_peat_map_dict():
# load in peat map and fires
path_to_peat_maps = '/Volumes/INTENSO/kcl-fire-aot/ODA/external/peat_maps'
peat_map_dict = {}
peat_maps_paths = glob.glob(path_to_peat_maps + '*/*/*.nc')
for peat_maps_path in peat_maps_paths:
peat_map_key = peat_maps_path.split("/")[-1].split(".")[0]
peat_map_dict[peat_map_key] = read_nc(peat_maps_path)
return peat_map_dict
def proc_params():
d = {}
d['full_plume'] = True
d['plot'] = False
d['resampled_pix_size'] = 750 # size of UTM grid in meters
d['frp_df'] = ut.read_frp_df(fp.path_to_himawari_frp)
d['plume_df'] = ut.read_plume_polygons(fp.plume_polygon_path_csv)
# set up ORAC bias adjustment from AERONET comparison
d['bias_adjust'] = True
d['orac_slope'] = 1.24459532607
d['orac_intercept'] = 0.939506768761
geo_file = os.path.join(fp.path_to_himawari_imagery, 'Himawari_lat_lon.img')
geostationary_lats, geostationary_lons = load_hrit.geo_read(geo_file)
d['geostationary_lats'] = geostationary_lats
d['geostationary_lons'] = geostationary_lons
d['output_path'] = fp.pt_vis_path
d['df_output_path'] = fp.path_to_frp_tpm_models
d['df_list'] = []
return d
def setup_plume_data():
pp = proc_params()
previous_timestamp = ''
geom_geo_list = []
geom_utm_list = []
data_utm_list = []
time_stamps = []
    # iterate over the plumes
for p_number, plume in pp['plume_df'].iterrows():
# we do not use these plumes as they are not on peat
if p_number in [1, 5, 6, 7, 17]:
continue
# make a directory to hold the plume logging information
plume_logging_path = ut.create_logger_path(p_number)
# get plume time stamp
current_timestamp = ut.get_timestamp(plume.filename, 'viirs')
time_stamps.append(current_timestamp)
# read in satellite data
if current_timestamp != previous_timestamp:
try:
sat_data = ut.setup_sat_data(current_timestamp)
if pp['bias_adjust']:
sat_data['orac_aod'] = (sat_data['orac_aod'] -
pp['orac_intercept']) / pp['orac_slope']
except Exception, e:
continue
sat_data_utm = ut.resample_satellite_datasets(sat_data, pp=pp, plume=plume)
previous_timestamp = current_timestamp
if sat_data_utm is None:
continue
# construct plume and background coordinate data
plume_geom_geo = ut.setup_plume_data(plume, sat_data_utm)
geom_geo_list.append(plume_geom_geo)
# subset the satellite AOD data to the plume
plume_data_utm = ut.subset_sat_data_to_plume(sat_data_utm, plume_geom_geo)
data_utm_list.append(plume_data_utm)
# Reproject plume shapely objects to UTM
plume_geom_utm = ut.resample_plume_geom_to_utm(plume_geom_geo)
geom_utm_list.append(plume_geom_utm)
return geom_geo_list, data_utm_list, geom_utm_list, time_stamps
def extract_combined_aod_full_plume(plume_data_utm,
plume_mask):
# todo add to config
orac_limit = 10
# combine plume mask with VIIRS good and ORAC good
viirs_good = plume_data_utm['viirs_flag_utm_plume'] <= 1
orac_good = plume_data_utm['orac_cost_utm_plume'] <= orac_limit
    viirs_plume_mask = plume_mask & viirs_good  # VIIRS contribution
orac_plume_mask = plume_mask & (orac_good & ~viirs_good) # ORAC contribution
    # make the combined product
combined = np.zeros(viirs_plume_mask.shape) - 999
combined[orac_plume_mask] = plume_data_utm['orac_aod_utm_plume'][orac_plume_mask]
combined[viirs_plume_mask] = plume_data_utm['viirs_aod_utm_plume'][viirs_plume_mask]
return combined
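# Precedence in the combined product above: ORAC only fills pixels where the VIIRS flag is
# bad but the ORAC cost is below orac_limit; VIIRS AOD is written afterwards wherever its
# flag is good, and everything else stays at the -999 fill value.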
def interp_aod(aod, plume_mask):
'''
    Interpolate using a radial basis function. See the model
    comparisons showing that this is the best approach.
'''
good_mask = aod != -999
# build the interpolation grid
y = np.linspace(0, 1, aod.shape[0])
x = np.linspace(0, 1, aod.shape[1])
xx, yy = np.meshgrid(x, y)
# create interpolated grid (can extend to various methods)
rbf = interpolate.Rbf(xx[good_mask], yy[good_mask], aod[good_mask], function='linear')
interpolated_aod = rbf(xx, yy)
aod[~good_mask] = interpolated_aod[~good_mask]
return aod
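# Minimal illustration of interp_aod (synthetic values only): gaps flagged with -999 are
# filled by a radial basis function fitted to the valid pixels, so an isolated gap in a
# smooth field comes back close to its neighbours.
_demo_aod = np.arange(25, dtype=float).reshape(5, 5) / 10.0
_demo_aod[2, 2] = -999.0
_demo_filled = interp_aod(_demo_aod.copy(), np.ones((5, 5), dtype=bool))
# _demo_filled[2, 2] is now close to 1.2 rather than -999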
def assign_peat(peat_df, fire_df):
peat_lat_lon = np.dstack([np.deg2rad(peat_df.lats.values),
np.deg2rad(peat_df.lons.values)])[0]
peat_balltree = BallTree(peat_lat_lon, metric='haversine')
    # get the fire lats and lons for querying the ball tree
fire_locations = np.array(zip(np.deg2rad(fire_df.LATITUDE.values),
np.deg2rad(fire_df.LONGITUDE.values)))
    # find the nearest peat-map cell to each fire location
distances, indexes = peat_balltree.query(fire_locations, k=1)
    # assign the peat flag of the nearest peat-map cell to each fire
fire_df['is_peat'] = peat_df.is_peat.values[indexes]
return fire_df
def assign_him_gridcell(him_lats, him_lons, fire_df):
# find the min and max positions
mask = ((him_lats > fire_df.latitude.min()) &
(him_lats < fire_df.latitude.max()) &
(him_lons > fire_df.longitude.min()) &
(him_lons < fire_df.longitude.max()))
y,x = np.where(mask)
min_y = np.min(y)
max_y = np.max(y)
min_x = np.min(x)
max_x = np.max(x)
    # subset the lat lon grid
sub_lats = him_lats[min_y:max_y, min_x:max_x]
sub_lons = him_lons[min_y:max_y, min_x:max_x]
    # build a ball tree over the subset Himawari lat/lon grid
print("building ball tree")
lat_lon = np.dstack([np.deg2rad(sub_lats.flatten()),
np.deg2rad(sub_lons.flatten())])[0]
balltree = BallTree(lat_lon, metric='haversine')
    # get the fire lats and lons for querying the ball tree
print("zipping locations")
fire_locations = np.array(zip(np.deg2rad(fire_df.latitude.values),
np.deg2rad(fire_df.longitude.values)))
    # find the nearest Himawari grid cell to each fire location
print("querying ball tree")
distances, indexes = balltree.query(fire_locations, k=1)
# convert indexes to row/col
print("getting indexes")
row = indexes / (max_x - min_x)
col = indexes % (max_x - min_x)
# update to full grid positions
row += min_y
col += min_x
print("updating DF")
fire_df["HIM_row"] = row
fire_df["HIM_col"] = col
return fire_df
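# NOTE: the read_nc below overrides the earlier definition of the same name; it returns
# flattened is_peat/lats/lons arrays, which is consistent with how peat_data is indexed
# in the plotting cells above.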
def read_nc(f):
nc_file = Dataset(f)
# get geo
lon_dims = nc_file['x_range'][:]
lat_dims = nc_file['y_range'][:]
spacing = nc_file['spacing'][:]
lons = np.arange(lon_dims[0], lon_dims[1], spacing[0])
lats = np.flipud(np.arange(lat_dims[0], lat_dims[1], spacing[1]))
lons, lats = np.meshgrid(lons, lats)
# get mask
z = nc_file['z'][:].reshape([3000, 3000])
mask = z > 0
return {"is_peat": mask.ravel(), "lats": lats.ravel(), "lons": lons.ravel()}
def geo_read(f, verbose=False):
"""
read in the static data like view angle, landcover
put them in a data dictionary
"""
    dim = 5500  # hard coded for the Himawari-8 5500 x 5500 lat/lon grid
if verbose:
print 'reading file %s' % f
dtype = np.float32
shape = (2, dim, dim)
data = np.fromfile(f, dtype=dtype).reshape(shape)
lat = data[0, :, :].astype(dtype)
lon = data[1, :, :].astype(dtype)
return lat, lon
In [ ]: