In [1]:
import os
import numpy as np
import matplotlib.pyplot as plt
from skimage import io
from psyutils.image import show_im
import psyutils as pu
%load_ext autoreload
%autoreload 2
%matplotlib inline
In [2]:
# load a test image:
img = pu.im_data.orientation_test()
show_im(img)
In [3]:
# make a filterbank and visualise:
ppd = 43 # set the pixels per degree of visual angle
frequencies = [1, 4, 8] # in cycles per deg
n_orientations = 8  # orientations run from 0 to pi; 0 is a "horizontal" Gabor (vertical carrier).
pu.image.gaborbank_vis(frequencies=frequencies, n_orientations=n_orientations, ppd=ppd)
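As a reference point for the orientation convention, the filters are assumed here to be evenly spaced over the half-circle [0, pi). A minimal sketch of that assumption (not necessarily how psyutils spaces them internally; compare with out['theta'] printed below):
# assumed spacing: n_orientations evenly-spaced angles covering [0, pi)
thetas = np.linspace(0, np.pi, n_orientations, endpoint=False)
print(np.round(thetas, 3))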
In [4]:
# convolve this filterbank with our demo image:
out = pu.image.gaborbank_convolve(img, frequencies=frequencies,
                                  n_orientations=n_orientations,
                                  ppd=ppd)
# out is a dict containing the filter frequencies, orientations and responses:
print(out.keys())
print(out['f'])
print(out['theta'])
In [5]:
# res is a 4D numpy array containing the filter responses for each
# pixel, scale and orientation:
res = out['res'] # pull it out of the dict for easier reading.
res.shape
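The axis layout assumed throughout the rest of this notebook is (rows, cols, scale, orientation), so the last two axes should match the number of frequencies and orientations requested above. A quick sanity check under that assumption:
rows, cols, n_scales, n_oris = res.shape
print(n_scales == len(frequencies), n_oris == n_orientations)  # expected: True True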
In [6]:
# it contains the complex-valued Gabor responses.
# lowest SF scale, first orientation (vertical carrier), cosine (even-symmetric) component:
show_im(res[:, :, 0, 0].real)
In [7]:
# lowest SF scale, first orientation (vertical carrier), sine (odd-symmetric) component:
show_im(res[:, :, 0, 0].imag)
In [8]:
# compute the energy (the phase-invariant response) at each scale and orientation.
# summed over orientations, this approximates the contrast at a given scale.
e = res.real**2 + res.imag**2
show_im(e.sum(axis=3)[:, :, 0])  # energy at the lowest frequency (coarsest scale), over all orientations
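Since real**2 + imag**2 is just the squared magnitude of a complex number, the energy above can equivalently be written with np.abs; this is a pure identity, independent of how the filterbank is implemented:
# the energy is the squared modulus of the complex filter response
print(np.allclose(e, np.abs(res) ** 2))  # expected: True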
In [9]:
show_im(e.sum(axis=3)[:, :, 2])  # energy at the highest frequency (finest scale), over all orientations
In [10]:
# energy over all scales and orientations:
show_im(e.sum(axis=3).sum(axis=2))
In [11]:
# visualise orientation, with energy averaged over scales:
show_im(pu.image.gaborbank_orientation_vis(out))
In [12]:
# show orientation estimates unweighted by filter energy:
oris = pu.image.gaborbank_mean_orientation(out)
plt.imshow(oris, cmap='hsv')
plt.colorbar();
In [13]:
# phase angle of the response at each pixel, scale and orientation:
phase = pu.image.gaborbank_phase_angle(out)
plt.imshow(phase[:, :, 0, 0], cmap='hsv')
plt.colorbar();
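If gaborbank_phase_angle simply returns the argument of the complex filter response (an assumption about the implementation, not taken from the psyutils source), it can be reproduced with np.angle:
# assumed: phase angle == argument of the complex response
phase_manual = np.angle(out['res'])
print(np.allclose(phase_manual, phase))  # True if the assumption holds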
In [14]:
# check averaging the phase angle over scales:
mean_phase = phase.mean(axis=2)
plt.imshow(mean_phase[:, :, 0], cmap='hsv')
plt.colorbar();
In [15]:
# visualise the orientation variance at each pixel:
show_im(pu.image.gaborbank_orientation_variance(out))
In [16]:
# try a second orientation test image:
img = pu.im_data.orientation_test_2()
out = pu.image.gaborbank_convolve(img, frequencies=[1, 10],
                                  n_orientations=12,
                                  ppd=ppd)
ori_im = pu.image.gaborbank_orientation_vis(out)
show_im(ori_im)
In [17]:
# load a test image
img = pu.im_data.tiger_grey()
show_im(img)
In [18]:
out = pu.image.gaborbank_convolve(img, frequencies=[0.5, 1, 5, 10],
                                  n_orientations=12,
                                  ppd=ppd)
# total energy, summed over all scales and orientations:
e = out['res'].real**2 + out['res'].imag**2
e = e.sum(axis=3).sum(axis=2)
show_im(e)
In [19]:
ori_im = pu.image.gaborbank_orientation_vis(out)
show_im(ori_im)
In [20]:
show_im(pu.image.gaborbank_orientation_variance(out))
By default, the gaborbank_orientation_vis function returns orientations estimated by the circular mean of the filter responses; that is, it interpolates orientations between the filters. An alternative is to take the orientation of the filter with the maximum response as the orientation at a given pixel. This is simpler, but it also means the result depends strongly on how many orientation filters you use.
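A common way to compute a circular mean for orientation data, which wraps at pi rather than 2*pi, is to double the angles, average the (optionally energy-weighted) unit vectors, and halve the angle of the resultant. The sketch below illustrates the idea; it is an assumption about the general approach, not a copy of the psyutils implementation:
def circular_mean_orientation(energy, thetas):
    """Energy-weighted circular mean over the orientation axis (last axis).

    energy: array of shape (..., n_orientations) of filter energies
    thetas: array of shape (n_orientations,) of filter orientations in [0, pi)
    """
    # double the angles so that orientations 0 and pi map to the same point
    z = (energy * np.exp(2j * thetas)).sum(axis=-1)
    # halve the angle of the resultant and wrap back into [0, pi)
    return (np.angle(z) / 2) % np.pi

# example usage (under the axis-layout assumption used above):
# ori = circular_mean_orientation((out['res'].real**2 + out['res'].imag**2).sum(axis=2),
#                                 np.asarray(out['theta']))
With this scheme the estimate varies smoothly between filter orientations, which is why the mean-orientation image below contains many distinct values.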
In [21]:
img = pu.im_data.orientation_test_2()
show_im(img)
In [22]:
out = pu.image.gaborbank_convolve(img, frequencies=[1, 10],
                                  n_orientations=4,
                                  ppd=ppd)
mean_ori = pu.image.gaborbank_mean_orientation(out)  # circular mean (mean resultant vector)
max_ori = pu.image.gaborbank_max_orientation(out)    # orientation of the maximally-responding filter
In [23]:
# lots of unique orientation values in the mean ori:
plt.hist(mean_ori.flatten());
In [24]:
# using the max response:
plt.hist(max_ori.flatten());
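A quick check of the intuition behind the two histograms: the circular mean should produce many distinct orientation values, whereas the max method should produce at most n_orientations (here 4) distinct values, assuming it simply reports the preferred orientation of the winning filter:
print(np.unique(mean_ori).size)  # expected: many distinct values
print(np.unique(max_ori).size)   # expected: at most 4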
In [25]:
show_im(pu.image.gaborbank_orientation_vis(out, method='mean'))
In [26]:
show_im(pu.image.gaborbank_orientation_vis(out, method='max'))
Using the max response therefore quantises the orientation estimate into discrete bins, one per filter, rather than interpolating continuously between filter orientations.
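For completeness, a hedged sketch of the max-response estimate (again an assumption about the method, not the psyutils source): sum the energy over scales, then take the preferred orientation of the filter with the largest energy at each pixel.
# energy summed over scales; axes are assumed to be (rows, cols, scale, orientation)
e = (out['res'].real ** 2 + out['res'].imag ** 2).sum(axis=2)
# orientation of the maximally-responding filter at each pixel
max_ori_manual = np.asarray(out['theta'])[e.argmax(axis=-1)]
print(np.unique(max_ori_manual))  # at most n_orientations distinct values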