In [1]:
# Data wrangling libraries
import pandas as pd
import numpy as np
import rasterio
from collections import defaultdict
# Plotting libraries
import seaborn as sns
import matplotlib.pyplot as plt
%matplotlib qt
# View a DataFrame in browser
import webbrowser
from tempfile import NamedTemporaryFile
# Analysis Libraries
import scipy
import cv2
# Note: these star imports shadow one another in order; e.g. rotate() below
# resolves to skimage.transform.rotate, not scipy.ndimage.rotate.
from scipy.spatial import *
from scipy.ndimage import *
from skimage.transform import *
from skimage.morphology import *
from skimage.util import *
from skimage.measure import compare_ssim as ssim
from skimage.measure import compare_mse as mse
from skimage.measure import compare_psnr as psnr
In [2]:
### Similarity Test Functions ###
def procrustes_analysis(data):
for d in data:
mtx1, mtx2, disparity = procrustes(data[0], d)
# disparity is the sum of the square errors
# mtx2 is the optimal matrix transformation
disp_vals.append(disparity.round(3))
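# A minimal sanity check of the disparity convention (assuming the
# scipy.spatial star import above provides procrustes): a rigidly rotated
# copy of a point set should come back with disparity ~ 0, since the
# Procrustes fit removes translation, scale, and rotation.
_pts = np.array([[0., 0.], [1., 0.], [1., 1.], [0., 1.]])
_rot90 = np.array([[0., -1.], [1., 0.]])
_, _, _disp = procrustes(_pts, _pts.dot(_rot90.T))
# _disp is ~ 0.0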
def peak_snr(data, x):
for d in data:
psnr_vals.append( psnr ( data[0], d, dynamic_range = x ).round(3))
def make_quadrants(data):
    # Split each (square) pattern into four equal quadrants.
    q = data[0].shape[0] // 2
    for d in data:
        tl, tr, ll, lr = d[:q, :q], d[:q, q:], d[q:, :q], d[q:, q:]
        top_lefts.append(tl)
        top_rights.append(tr)
        low_lefts.append(lr if False else ll)
        low_rights.append(lr)
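# Quadrant slicing illustrated on a toy 4 x 4 array (a sketch, not part of
# the analysis): the four 2 x 2 blocks come out in reading order.
_toy = np.arange(16).reshape(4, 4)
_q = _toy.shape[0] // 2
_quads = _toy[:_q, :_q], _toy[:_q, _q:], _toy[_q:, :_q], _toy[_q:, _q:]
# top left [[0,1],[4,5]], top right [[2,3],[6,7]], and so on.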
def structural_sim(data):
for d in data:
ssim_vals.append( ssim ( data[0], d ).round( 2 ))
ssim_maps.append( ssim ( data[0], d, full = True )[1] )
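# With full = True this skimage version returns (mean score, per-pixel SSIM
# image); a quick self-comparison check (an illustration only):
_a = np.random.rand(16, 16)
_score, _map = ssim(_a, _a, full=True)
# _score == 1.0 and _map is all ones for a perfect match.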
def reg_mse(data):
for d in data:
mse_vals.append(( mse ( data[0], d )).round(2))
mse_maps.append((data[0] - d) ** 2)
def imse( data ):
    # Build the information map once from the reference pattern (data[0]).
    unique_vals_and_counts = np.round( np.unique( data[0], return_counts = True ), 1 )
    vals = np.array( unique_vals_and_counts[0], dtype = 'float32' )
    counts = np.array( unique_vals_and_counts[1], dtype = 'float32' )
    num_pixels = data[0].size
    # Relative frequency of each unique value, i.e. its probability p.
    shannons = np.round( np.divide( counts, num_pixels ), 6 )
    # Shannon information content, ln(1/p): rare values carry more information.
    info_vals = np.round( np.log( 1 / shannons ), 2 )
    trans_dct = dict( zip( vals, info_vals ) )
    infomap = np.copy( data[0] )
    for k, v in trans_dct.items():
        infomap[data[0] == k] = v
    # Information-weighted MSE of each test pattern against the reference.
    for d in data:
        imse_map = (( infomap * data[0] ) - ( infomap * d )) ** 2
        imse_maps.append( imse_map )
        err = np.sum( imse_map ) / float( data[0].shape[0] * data[0].shape[1] )
        imse_vals.append( np.round( err, 2 ))
def np_hist_to_cv(np_histogram_output):
    # Convert a numpy histogram tuple (counts, bin_edges) into the flat
    # float32 counts array that cv2.compareHist expects.
    counts, bin_edges = np_histogram_output
    return counts.ravel().astype('float32')
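# Toy illustration of the information weighting inside imse() (a sketch;
# the exact numbers depend on the reference pattern): rare values carry
# more information, so errors on them are penalized more heavily.
_ref = np.array([0., 0., 0., 1.])
_v, _c = np.unique(_ref, return_counts=True)
_info = np.log(1. / (_c / float(_ref.size)))
# _info ~ [0.29, 1.39]: the lone 1 is ~4.8x more informative than each 0.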
### Plotting Functions ###
# Function to display DataFrame in new browser tab.
def df_window(df):
with NamedTemporaryFile(delete=False, suffix='.html') as f:
df.to_html(f)
webbrowser.open(f.name)
# Plot binary patterns and distortions
def plot_binary(names, data):
fig, axes = plt.subplots( nrows = 4, ncols = 4 )
for p, dat, ax in zip( names, data, axes.flat ):
# The vmin and vmax arguments specify the color limits
im = ax.imshow(dat, cmap = 'gray', interpolation = 'nearest', vmin=0, vmax=1)
ax.set_xticks([])
ax.set_yticks([])
ax.set_title(p,fontsize = 10, color = 'white')
    # The 4 x 4 grid has more axes than patterns; drop the unused ones
fig.delaxes(axes[-1,-1])
fig.delaxes(axes[-1,-2])
fig.delaxes(axes[-1,-3])
    # Make an axis for the colorbar along the left edge
cax = fig.add_axes( [0.05, 0.2, 0.04, 0.6] )
fig.colorbar( im, cax=cax, ticks = ([0,1]) )
cax.tick_params(labelsize = 10, colors = 'white')
# Plot continuous patterns and distortions
def plot_continuous(names, data):
fig, axes = plt.subplots( nrows = 4, ncols = 4 )
for p, dat, ax in zip( names, data, axes.flat ):
# The vmin and vmax arguments specify the color limits
im = ax.imshow(dat, cmap = 'viridis', interpolation = 'nearest', vmin=-1, vmax=1)
ax.set_xticks([])
ax.set_yticks([])
ax.set_title(p,fontsize = 10, color = 'white')
    # The 4 x 4 grid has one more axis than patterns; drop it
fig.delaxes(axes[-1,-1])
    # Make an axis for the colorbar along the left edge
cax = fig.add_axes( [0.05, 0.2, 0.04, 0.6] )
fig.colorbar( im, cax=cax, ticks = ( [-1,0,1] ) )
cax.tick_params(labelsize = 10, colors = 'white')
def plot_snow(names, data):
fig, axes = plt.subplots( nrows = 4, ncols = 4 )
fig.suptitle('Fidelity Tests of Snow Depth Patterns [m]', color = 'white')
for p, dat, ax in zip( names, data, axes.flat ):
# The vmin and vmax arguments specify the color limits
im = ax.imshow(dat, cmap = 'viridis', interpolation = 'nearest', vmin = 0, vmax = 2)
ax.set_xticks([])
ax.set_yticks([])
ax.set_title(p,fontsize = 8, color = 'white')
    # 13 panels in a 4 x 4 grid; drop the three unused axes
fig.delaxes(axes[-1,-1])
fig.delaxes(axes[-1,-2])
fig.delaxes(axes[-1,-3])
    # Make an axis for the colorbar along the left edge
cax = fig.add_axes( [0.05, 0.2, 0.04, 0.6] )
fig.colorbar( im, cax=cax, ticks = ( [0,1,2] ) )
cax.tick_params(labelsize = 8, colors = 'white')
def plot_tests(names, test_vals, test_name, data, rows, cols, cmin, cmax):
    fig, axes = plt.subplots( nrows = rows, ncols = cols )
for p, v, dat, ax in zip( names, test_vals, data, axes.flat ):
# The vmin and vmax arguments specify the color limits
im = ax.imshow(dat, cmap = 'viridis', interpolation = 'nearest', vmin = cmin, vmax = cmax)
ax.set_xticks([])
ax.set_yticks([])
ax.set_title(p + " " + test_name + str(v), fontsize = 8, color = 'white' )
    # Drop any unused axes when the grid is larger than the data
if len(names) != rows*cols:
diff = -1*( rows*cols - len(names))
i = -1
while i >= diff:
fig.delaxes(axes[-1,i])
i = i-1
    # Make an axis for the colorbar along the left edge
cax = fig.add_axes( [0.05, 0.2, 0.04, 0.6] )
fig.colorbar( im, cax=cax, ticks = ( [cmin, cmax] ) )
cax.tick_params(labelsize = 8, colors = 'white')
In [3]:
### Set of Reference Patterns
# Horizontal stripes: alternating bands of 4 white (1) and 4 black (0) rows
stripes = np.zeros(( 32, 32 ))
j = 0
k = 4
while k < 33:
stripes[j:k] = 1
j = j + 8
k = j + 4
# 512 samples spanning -pi to pi
pi_cycles = np.linspace( -np.pi, np.pi, 512 )
# duplicate so the 1024 samples reshape to a 32 x 32 grid (two full cycles)
pi_cycles = np.append( pi_cycles, pi_cycles )
pi_cycles = pi_cycles.reshape( 32, 32 )
# Sine Wave
sine = np.sin( pi_cycles )
# Cosine
cosine = np.cos( pi_cycles )
# Gaussian Noise
mu = 0.5
sigma = 0.15
gauss = np.random.normal( mu, sigma, ( 32,32 ))
# Snow depth raster
src1 = rasterio.open( '/home/cparr/Snow_Patterns/snow_data/happy_valley/raster/snow_on/hv_snow_watertrack_square2012.tif' )
snow_test = src1.read(1)
snow_test = snow_test.astype('float64')
In [ ]:
# '''
# Warping a reference pattern of binary data.
# '''
# binwarp_data = []
# # Initialize lists for metrics.
# mse_vals = []
# ssim_vals = []
# psnr_vals = []
# top_lefts = []
# top_rights = []
# low_lefts = []
# low_rights = []
# imse_vals = []
# imse_maps = []
# def warp_binary(pattern):
# binwarp_data.append(pattern)
# rows, cols = pattern.shape
# # half phase shift for stripes
# half_phase = np.zeros((32, 32))
# j = 2
# k = 6
# while k < 33:
# half_phase[j:k] = 1
# j = j + 8
# k = j + 4
# binwarp_data.append(half_phase)
# # 90 degree rotation
# rotate90 = np.rot90(pattern)
# binwarp_data.append(rotate90)
# #45 degree rotation
# oblique = rotate(pattern, 45)
# binwarp_data.append(oblique)
# # morphological dilation and erosion
# morph_dilation = dilation(pattern)
# morph_erosion = erosion(pattern)
# binwarp_data.append(morph_dilation)
# binwarp_data.append(morph_erosion)
# # flip up and down, basically a full phase shift or reflection
# inverse = np.flipud(pattern)
# binwarp_data.append(inverse)
# # a shift or translation
# shift_M = np.float32([[1,0,1],[0,1,0]])
# shifted = cv2.warpAffine(pattern,shift_M,(cols,rows))
# binwarp_data.append(shifted)
# # randomly shuffle rows of array, create a random frequency
# permutation = np.random.permutation(pattern)
# binwarp_data.append(permutation)
# # Random Affine Transformation
# c = np.random.random_sample(( 6, ))
# m = np.append( c, ( 0,0,1 ) )
# m = m.reshape( 3,3 )
# aff_t = AffineTransform( matrix = m )
# random_aff_warp = warp( pattern, aff_t )
# binwarp_data.append( random_aff_warp )
# # gauss
# binwarp_data.append(gauss)
# # random binary
# random_bin = np.random.randint(2, size=1024)
# random_bin = random_bin.reshape(32,32)
# random_bin = random_bin.astype('float64')
# binwarp_data.append(random_bin)
# # Finger edges
# edge = np.zeros(( 32, 32 ))
# j = 0
# k = 4
# while k < 33:
# edge[j:k] = 1
# j = j + 8
# k = j + 4
# edge[3][1::2] = 0
# edge[7][1::2] = 1
# edge[11][1::2] = 0
# edge[15][1::2] = 1
# edge[19][1::2] = 0
# edge[23][1::2] = 1
# edge[27][1::2] = 0
# edge[31][1::2] = 1
# binwarp_data.append(edge)
# # Subplot Titles and Dictionary Keys
# binwarp_names = ['Original', 'Half Phase Shift', 'Rotate 90','Rotate 45',
# 'Dilation', 'Erosion','Flip U/D', 'X Shift',
# 'Row Shuffle', 'Random Affine', 'Gauss', 'Random','Edges']
# # Call It.
# warp_binary(stripes)
# # Call Metrics on list of test patterns
# peak_snr( binwarp_data, 64 )
# # PSNR requires the dynamic range, e.g. 8-bit data is 255
# structural_sim( binwarp_data )
# reg_mse( binwarp_data )
# #procrustes_analysis( binwarp_data )
# # this doesn't seem to work for binary..
# disp_vals = np.arange(0,13)
# make_quadrants( binwarp_data )
# imse(binwarp_data)
# # Match names and arrays
# binary_zip = zip(binwarp_names,binwarp_data, mse_vals, ssim_vals, psnr_vals, disp_vals,
# top_lefts, top_rights, low_lefts, low_rights, imse_vals, imse_maps)
In [ ]:
# '''
# Warping a base pattern of continuous data.
# Applying MSE, SSIM, PSNR, and Procrustes.
# Zipping all of these values into a mega-list.
# Afterward, create a dict and a DataFrame by iterating through the mega-list.
# '''
# ctswarp_data = []
# # Initialize lists for metrics.
# mse_vals = []
# ssim_vals = []
# psnr_vals = []
# disp_vals = []
# top_lefts = []
# top_rights = []
# low_lefts = []
# low_rights = []
# imse_vals = []
# imse_maps = []
# # Create the test patterns.
# def warp_continuous(pattern):
# ctswarp_data.append(pattern)
# rows, cols = pattern.shape
# # 90 degree rotation
# rotate90 = np.rot90(pattern)
# ctswarp_data.append(rotate90)
# #45 degree rotation
# oblique = rotate(pattern, 45)
# ctswarp_data.append(oblique)
# # morphological dilation and erosion
# morph_dilation = dilation(pattern)
# morph_erosion = erosion(pattern)
# ctswarp_data.append(morph_dilation)
# ctswarp_data.append(morph_erosion)
# # flip up and down, basically a phase shift
# inverse = np.flipud(pattern)
# ctswarp_data.append(inverse)
# # a shift or translation
# shift_M = np.float32([[1,0,1],[0,1,0]])
# shifted = cv2.warpAffine(pattern,shift_M,(cols,rows))
# ctswarp_data.append(shifted)
# # randomly shuffle rows of array, create a random frequency
# permutation = np.random.permutation(pattern)
# ctswarp_data.append(permutation)
# # sine warp
# # basically sine of sine...reduces the intensity
# sine_of = np.sin(pattern)
# ctswarp_data.append(sine_of)
# # cosine
# ctswarp_data.append(cosine)
# # Random between -1 and 1
# random_abs1 = np.random.uniform(-1, 1, [32,32])
# ctswarp_data.append(random_abs1)
# # Gaussian noise
# mu = 0
# sigma = 0.32
# gauss_abs1 = np.random.normal(mu, sigma, (32,32))
# ctswarp_data.append(gauss_abs1)
# # Random Affine Transformation
# c = np.random.random_sample(( 6, ))
# m = np.append( c, ( 0,0,1 ) )
# m = m.reshape( 3,3 )
# aff_t = AffineTransform( matrix = m )
# random_aff_warp = warp( pattern, aff_t )
# ctswarp_data.append( random_aff_warp )
# # Additive Gaussian Noise
# noise = random_noise( pattern, mode = 'gaussian' )
# ctswarp_data.append( noise )
# # More Additive Gaussian Noise
# more_noise = random_noise(random_noise(random_noise(random_noise( noise, mode = 'gaussian' ))))
# ctswarp_data.append( more_noise )
# # Plot Titles and dictionary keys
# ctswarp_names = ['Reference', 'Rotate 90', 'Rotate 45', 'Dilation',
# 'Erosion', 'Flip U/D', 'X Shift', 'Row Shuffle',
# 'Sine (Reference)', 'Cosine', 'Random', 'Gauss',
# 'Random Affine', 'Add Gaussian Noise','More Noise']
# # Call It.
# warp_continuous( sine )
# # Call Metrics on list of test patterns
# peak_snr( ctswarp_data, 64 )
# # PSNR requires the dynamic range, e.g. 8-bit data is 255
# structural_sim( ctswarp_data )
# reg_mse( ctswarp_data )
# procrustes_analysis( ctswarp_data )
# make_quadrants( ctswarp_data )
# imse(ctswarp_data)
# # Zip names, data, metrics, quadrants into a mega list!
# # Generally this is inadvisable because it relies on indexing; in the next cell we will make a dictionary.
# continuous_zip = zip(ctswarp_names,ctswarp_data, mse_vals, ssim_vals, psnr_vals, disp_vals,
# top_lefts, top_rights, low_lefts, low_rights, imse_vals, imse_maps)
In [4]:
'''
Snow Data Test
'''
snow_data = []
# Initialize lists for metrics.
mse_vals = []
ssim_vals = []
psnr_vals = []
disp_vals = []
top_lefts = []
top_rights = []
low_lefts = []
low_rights = []
imse_vals = []
imse_maps = []
mse_maps = []
ssim_maps = []
# Create the test snows.
def warp_snow(snow):
snow_data.append(snow)
rows, cols = snow.shape
mu = snow.mean()
sigma = snow.std()
# 90 degree rotation
rotate90 = np.rot90(snow)
snow_data.append(rotate90)
#45 degree rotation
oblique = rotate(snow, 45)
b = oblique == 0
oblique[b] = np.random.normal(mu, sigma, size=b.sum())
snow_data.append(oblique)
# morphological dilation and erosion
selem = square(7)
morph_dilation = dilation(snow, selem)
morph_erosion = erosion(snow, selem)
snow_data.append(morph_dilation)
snow_data.append(morph_erosion)
    # flip up and down: a reflection about the horizontal axis
inverse = np.flipud(snow)
snow_data.append(inverse)
# a shift or translation
shift_M = np.float32([[1,0,1],[0,1,0]])
shifted = cv2.warpAffine(snow,shift_M,(cols,rows))
snow_data.append(shifted)
    # randomly shuffle the rows of the array, scrambling its vertical structure
permutation = np.random.permutation(snow)
snow_data.append(permutation)
# Random between bounds
random_abs1 = np.random.uniform(snow.min(), snow.max(), [rows, cols])
snow_data.append(random_abs1)
    # Gaussian noise drawn with the snow's mean and standard deviation (set above)
    gauss_abs1 = np.random.normal(mu, sigma, (rows, cols))
snow_data.append(gauss_abs1)
# Random Affine Transformation
c = np.random.random_sample(( 6, ))
m = np.append( c, ( 0,0,1 ) )
m = m.reshape( 3,3 )
aff_t = AffineTransform( matrix = m )
random_aff_warp = warp( snow, aff_t )
b = random_aff_warp == 0
random_aff_warp[b] = np.random.normal(mu, sigma, size=b.sum())
snow_data.append(random_aff_warp)
# Additive Gaussian Noise
noise = random_noise( snow, mode = 'gaussian' )
snow_data.append( noise )
# More Additive Gaussian Noise
more_noise = random_noise(random_noise(random_noise(random_noise( noise, mode = 'gaussian' ))))
snow_data.append( more_noise )
# Plot Titles and dictionary keys
snow_names = ['Reference', 'Rotate 90', 'Rotate 45', 'Dilation',
'Erosion', 'Y - Reflection', 'X Shift', 'Row Shuffle', 'Random', 'Gauss',
'Random Affine', 'Add Gaussian Noise','More Noise']
# Call It.
warp_snow( snow_test )
# Call metrics on the list of test snows.
# PSNR requires the dynamic range, e.g. 255 for 8-bit data; 64 is used here.
peak_snr( snow_data, 64 )
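# Sanity sketch of the PSNR definition (assuming this skimage version computes
# PSNR = 10 * log10(R^2 / MSE), with R the dynamic range passed above):
_mse_check = np.mean(( snow_data[0] - snow_data[1] ) ** 2)
_psnr_check = 10 * np.log10( 64.0 ** 2 / _mse_check )
# _psnr_check should match psnr_vals[1] up to rounding.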
structural_sim( snow_data )
reg_mse( snow_data )
procrustes_analysis( snow_data )
make_quadrants( snow_data )
imse(snow_data)
# Zip names, data, metrics, and quadrants into one list.
# Generally this is inadvisable because it relies on indexing; the next cell builds a dictionary instead.
snow_zip = zip(snow_names,snow_data, mse_vals, ssim_vals, psnr_vals, disp_vals,
top_lefts, top_rights, low_lefts, low_rights, imse_vals, imse_maps, mse_maps, ssim_maps)
In [8]:
# To plot
#plot_binary( binwarp_names, binwarp_data )
#plot_continuous( ctswarp_names, ctswarp_data )
#plot_snow( snow_names, snow_data )
# names, test_vals, test_name, data, rows, cols, cmin, cmax
plot_tests( snow_names, ssim_vals, " SSIM: ", ssim_maps, 4, 4, 0, 1 )
# To save the figure
#plt.savefig( '/home/cparr/stripe_world.png', bbox_inches = 'tight', dpi = 300, facecolor = 'purple' )
#plt.savefig('/home/cparr/pi_world.png', bbox_inches = 'tight', dpi = 300, facecolor = 'black')
#plt.savefig('/home/cparr/Snow_Patterns/figures/hv_snow_test/hv_snow_metric_test.png', bbox_inches = 'tight', dpi = 300, facecolor = 'black')
#plt.savefig('/home/cparr/Snow_Patterns/figures/hv_snow_test/hv_ssim_map.png', bbox_inches = 'tight', dpi = 300, facecolor = 'black')
#plt.savefig('/home/cparr/Snow_Patterns/figures/hv_snow_test/hv_mse_map.png', bbox_inches = 'tight', dpi = 300, facecolor = 'black')
#plt.savefig('/home/cparr/Snow_Patterns/figures/hv_snow_test/hv_imse_map.png', bbox_inches = 'tight', dpi = 300, facecolor = 'black')
In [29]:
from skimage import measure
fig, axes = plt.subplots( nrows = 4, ncols = 4 )
fig.suptitle('Fidelity Tests of Snow Depth Patterns [m]', color = 'white')
for p, dat, ax in zip( snow_names, snow_data, axes.flat ):
contours = measure.find_contours(dat, 0.8)
# The vmin and vmax arguments specify the color limits
im = ax.imshow(dat, cmap = 'gray', interpolation = 'nearest', vmin = 0, vmax = 2)
for n, contour in enumerate(contours):
if contour.size >= 150:
ax.plot(contour[:, 1], contour[:, 0], linewidth=0.5)
ax.set_xticks([])
ax.set_yticks([])
ax.set_title(p,fontsize = 8, color = 'white')
ax.axis('image')
# 13 panels in a 4 x 4 grid; drop the three unused axes
fig.delaxes(axes[-1,-1])
fig.delaxes(axes[-1,-2])
fig.delaxes(axes[-1,-3])
# Make an axis for the colorbar along the left edge
cax = fig.add_axes( [0.05, 0.2, 0.04, 0.6] )
fig.colorbar( im, cax=cax, ticks = ( [0,1,2] ) )
cax.tick_params(labelsize = 8, colors = 'white')
plt.savefig('/home/cparr/Snow_Patterns/figures/hv_snow_test/hv_contour_map.png', bbox_inches = 'tight', dpi = 300, facecolor = 'black')
In [23]:
contours = measure.find_contours(snow_test, 0.8)
l = [c.size for c in contours]
# Mean contour size plus one standard deviation: this informs the ~150
# size threshold used when drawing contours in the previous cell.
np.mean(l)+np.std(l)
Out[23]:
In [31]:
# Making a look-up dictionary from all the patterns and their comparison scores.
continuous_dict = defaultdict(dict)
binary_dict = defaultdict(dict)
snow_dict = defaultdict(dict)

def to_dict_w_hists( data_dict, keys, data_zip ):
    # Each zipped record is (name, array, mse, ssim, psnr, disparity,
    # top left, top right, low left, low right, imse, imse map, ...).
    # Region label, its index in the record, and its abbreviation in score keys.
    regions = [('full', 1, 'Full'), ('top left', 6, 'UL'), ('top right', 7, 'UR'),
               ('low left', 8, 'LL'), ('low right', 9, 'LR')]
    # Histogram comparison methods (OpenCV 2.x constants, as used throughout).
    methods = [('Bhattacharyya', cv2.cv.CV_COMP_BHATTACHARYYA),
               ('Chi Square', cv2.cv.CV_COMP_CHISQR),
               ('Correlation', cv2.cv.CV_COMP_CORREL)]
    for i, key in enumerate(keys):
        data_dict[key]['name'] = data_zip[i][0]
        data_dict[key]['arrays'] = {}
        # Store each region's array plus its numpy and cv2 histograms.
        for region, idx, abbrev in regions:
            arr = data_zip[i][idx]
            data_dict[key]['arrays'][region] = {
                'array': arr,
                'numpy hist': np.histogram( arr ),
                'cv2 hist': np_hist_to_cv( np.histogram( arr ) )}
        data_dict[key]['MSE'] = round(data_zip[i][2], 2)
        data_dict[key]['SSIM'] = round(data_zip[i][3], 2)
        data_dict[key]['Peak SNR'] = round(data_zip[i][4], 2)
        data_dict[key]['Procrustes Disparity'] = round(data_zip[i][5], 2)
        data_dict[key]['IMSE'] = round(data_zip[i][10], 2)
        data_dict[key]['IMSE Map'] = data_zip[i][11]
        # Histogram comparisons of each region against the reference (keys[0]),
        # which is always processed first so its histograms already exist.
        for method_name, flag in methods:
            for region, idx, abbrev in regions:
                score = cv2.compareHist(
                    data_dict[key]['arrays'][region]['cv2 hist'],
                    data_dict[keys[0]]['arrays'][region]['cv2 hist'],
                    flag)
                data_dict[key]['%s %s' % (method_name, abbrev)] = round(score, 2)
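# Minimal sketch of the histogram comparison used above (OpenCV 2.x API,
# matching the cv2.cv constants in this notebook): a histogram compared
# with itself gives a Bhattacharyya distance of ~0 and a correlation of 1.
_h = np_hist_to_cv( np.histogram( snow_test ) )
_bhatt_self = cv2.compareHist( _h, _h, cv2.cv.CV_COMP_BHATTACHARYYA )  # ~ 0.0
_corr_self = cv2.compareHist( _h, _h, cv2.cv.CV_COMP_CORREL )          # ~ 1.0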
In [32]:
#to_dict_w_hists( binary_dict, binwarp_names, binary_zip )
#to_dict_w_hists( continuous_dict, ctswarp_names, continuous_zip )
to_dict_w_hists( snow_dict, snow_names, snow_zip )
In [ ]:
# # Continuous Scores DataFrame
# cts_df = pd.DataFrame.from_dict(continuous_dict)
# cts_df = cts_df.transpose()
# continuous_scores = cts_df.copy()
# continuous_scores = continuous_scores[['MSE','SSIM','Peak SNR','Procrustes Disparity', 'IMSE','name']]
# continuous_scores['MSE Rank'] = np.round(continuous_scores['MSE'].rank(ascending=True))
# continuous_scores['PSNR Rank'] = np.round(continuous_scores['Peak SNR'].rank(ascending=False))
# continuous_scores['SSIM Rank'] = np.round(continuous_scores['SSIM'].rank(ascending=False))
# continuous_scores['PD Rank'] = np.round(continuous_scores['Procrustes Disparity'].rank())
# continuous_scores['IMSE Rank'] = np.round(continuous_scores['IMSE'].rank(ascending=True))
# continuous_scores = continuous_scores.sort_values('SSIM Rank')
# df_window(continuous_scores)
In [ ]:
# # Binary Scores DataFrame
# bin_df = pd.DataFrame.from_dict(binary_dict)
# bin_df = bin_df.transpose()
# binary_scores = bin_df.copy()
# binary_scores = binary_scores[['MSE','SSIM','Peak SNR','IMSE','name']]
# binary_scores['MSE Rank'] = np.round(binary_scores['MSE'].rank(ascending=True))
# binary_scores['PSNR Rank'] = np.round(binary_scores['Peak SNR'].rank(ascending=False))
# binary_scores['SSIM Rank'] = binary_scores['SSIM'].rank(ascending=False)
# binary_scores['IMSE Rank'] = np.round(binary_scores['IMSE'].rank(ascending=True))
# binary_scores = binary_scores.sort_values('SSIM Rank')
# df_window(binary_scores)
In [34]:
# Snow Scores DataFrame
snow_df = pd.DataFrame.from_dict(snow_dict)
snow_df = snow_df.transpose()
snow_scores = snow_df.copy()
snow_scores = snow_scores[['MSE','SSIM','Peak SNR','Procrustes Disparity', 'IMSE','name']]
snow_scores = snow_scores.sort_values('SSIM', ascending = False)
df_window(snow_scores)
In [35]:
def hist_score_table(df):
hist_scores = df.loc[:,['name', 'Bhattacharyya UL','Bhattacharyya UR','Bhattacharyya LL',
'Bhattacharyya LR', 'Bhattacharyya Full','Correlation UL','Correlation UR','Correlation LL',
'Correlation LR', 'Correlation Full','Chi Square UL','Chi Square UR','Chi Square LL',
'Chi Square LR', 'Chi Square Full']]
hist_scores['Mean Bhattacharyya'] = np.round(hist_scores[['Bhattacharyya UL','Bhattacharyya UR',
'Bhattacharyya LL', 'Bhattacharyya LR']].mean(axis = 1),2)
hist_scores['Mean Correlation'] = np.round(hist_scores[['Correlation UL','Correlation UR',
'Correlation LL', 'Correlation LR']].mean(axis = 1),2)
hist_scores['Mean Chi Square'] = np.round(hist_scores[['Chi Square UL','Chi Square UR',
'Chi Square LL', 'Chi Square LR']].mean(axis = 1),2)
hist_scores = hist_scores[['Mean Bhattacharyya', 'Mean Chi Square','Mean Correlation']]
hist_scores = hist_scores.sort_values('Mean Bhattacharyya')
df_window(hist_scores)
In [36]:
#hist_score_table(cts_df)
#hist_score_table(bin_df)
hist_score_table(snow_df)
In [ ]:
with sns.color_palette("spectral", n_colors=15):
g = sns.pairplot(continuous_scores, x_vars = ['MSE Rank'],
y_vars = ['PSNR Rank','SSIM Rank','PD Rank'], hue = 'name',
plot_kws=dict(s=150, edgecolor="b", linewidth=1))
labels = continuous_scores['name'].tolist()
g.set(xlim=(0, 16))
g.fig.set_tight_layout('tight')
#g.savefig('/home/cparr/cts_scores.png', dpi = 300)
In [ ]:
def make_strip_plot(df,metric):
sns.set(style="whitegrid")
# Make the PairGrid
g = sns.PairGrid(df.sort_values(metric, ascending=True),
x_vars = df.columns[1::], y_vars=["name"],
size=10, aspect=.25)
# Draw a dot plot using the stripplot function
g.map(sns.stripplot, size=10, orient="h",
palette="Reds_r", edgecolor="gray")
# Use the same x axis limits on all columns and add better labels
#g.set(xlim=(df[metric].min(), df[metric].max()), xlabel="Score", ylabel="")
# Use semantically meaningful titles for the columns
titles = df.columns[1::]
for ax, title in zip(g.axes.flat, titles):
# Set a different title for each axes
ax.set(title=title)
# Make the grid horizontal instead of vertical
ax.xaxis.grid(False)
ax.yaxis.grid(True)
sns.despine(left=True, bottom=True)
make_strip_plot(scores,"MSE")
In [ ]:
from mpl_toolkits.mplot3d import Axes3D
from matplotlib import cm
from matplotlib.ticker import LinearLocator, FormatStrFormatter
fig = plt.figure()
ax = fig.gca(projection='3d')
X = np.arange(0, 32)
Y = np.arange(0, 32)
X, Y = np.meshgrid(X, Y)
Z = gauss
surf = ax.plot_surface(X, Y, Z, rstride=1, cstride=1, cmap=cm.coolwarm,
linewidth=0, antialiased=False)
ax.set_zlim(0, 1)
ax.zaxis.set_major_locator(LinearLocator(10))
ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f'))
fig.colorbar(surf, shrink=0.5, aspect=5)
plt.show()
In [ ]: