See the other notebook for the grisly details and dead-ends.
Requirements:
numpy
scipy
scikit-learn
pillow
I recommend installing them with conda install.
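For example, from the command line (matplotlib is needed too, for the plots below):

conda install numpy scipy scikit-learn pillow matplotlib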
In [1]:
import numpy as np
import matplotlib.pyplot as plt
%matplotlib inline
In [2]:
from scipy import signal
In [3]:
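# Make a smooth random 2D field: convolve uniform noise with a broad Gaussian,
# keep only the fully-overlapping ('valid') region, then normalize to [0, 1].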
nx, ny = 100, 100
z = np.random.rand(nx, ny)
sizex, sizey = 30, 30
x, y = np.mgrid[-sizex:sizex+1, -sizey:sizey+1]
g = np.exp(-0.333*(x**2/float(sizex)+y**2/float(sizey)))
f = g/g.sum()
z = signal.convolve(z, f, mode='valid')
z = (z - z.min())/(z.max() - z.min())
In [4]:
cd ~/Dropbox/dev/rainbow/notebooks
In [5]:
cmap = 'viridis' # Perceptual
cmap = 'spectral' # Classic rainbow
cmap = 'seismic' # Classic diverging
cmap = 'Accent' # Needs coolinearity constraint
cmap = 'Dark2' # Needs coolinearity constraint
cmap = 'Paired' # Needs coolinearity constraint, ultimate test!
cmap = 'gist_ncar' # Works with new cool-point start location
cmap = 'Pastel1' # Amazing that it works for start point
cmap = 'Set2' # Difficult
cmap = 'Dark2'
In [6]:
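# Full-bleed axes: no frame, no ticks, so the saved PNG contains only data pixels.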
fig = plt.figure(frameon=False)
fig.set_size_inches(5,5)
ax = plt.Axes(fig, [0., 0., 1., 1.])
ax.set_axis_off()
fig.add_axes(ax)
# Note: interpolation introduces new colours.
plt.imshow(z, cmap=cmap, aspect='auto')
#plt.imshow(z, cmap=cmap, interpolation='none')
fig.savefig('data/cbar/test.png')
In [7]:
cmaps = [('Perceptually Uniform Sequential',
['viridis', 'inferno', 'plasma', 'magma']),
('Sequential', ['Blues', 'BuGn', 'BuPu',
'GnBu', 'Greens', 'Greys', 'Oranges', 'OrRd',
'PuBu', 'PuBuGn', 'PuRd', 'Purples', 'RdPu',
'Reds', 'YlGn', 'YlGnBu', 'YlOrBr', 'YlOrRd']),
('Sequential (2)', ['afmhot', 'autumn', 'bone', 'cool',
'copper', 'gist_heat', 'gray', 'hot',
'pink', 'spring', 'summer', 'winter']),
('Diverging', ['BrBG', 'bwr', 'coolwarm', 'PiYG', 'PRGn', 'PuOr',
'RdBu', 'RdGy', 'RdYlBu', 'RdYlGn', 'Spectral',
'seismic']),
('Qualitative', ['Accent', 'Dark2', 'Paired', 'Pastel1',
'Pastel2', 'Set1', 'Set2', 'Set3']),
('Miscellaneous', ['gist_earth', 'terrain', 'ocean', 'gist_stern',
'brg', 'CMRmap', 'cubehelix',
'gnuplot', 'gnuplot2', 'gist_ncar',
'nipy_spectral', 'jet', 'rainbow',
'gist_rainbow', 'hsv', 'flag', 'prism'])]
In [8]:
cd ~/Dropbox/dev/rainbow/notebooks
In [9]:
ls data/cbar
In [10]:
#import matplotlib.image as mpimg
from PIL import Image
# img = Image.open('data/cbar/boxer.png')
# img = Image.open('data/cbar/fluid.png')
# img = Image.open('data/cbar/lisa.png')
# img = Image.open('data/cbar/redblu.png')
#img = Image.open('data/cbar/seismic.png')
# img = Image.open('data/cbar/drainage.jpg')
#img = Image.open('data/cbar/test.png')
#img = Image.open('data/cbar/Colormap_Jet1.png') # Needs Matteo's stuff
# img = Image.open('data/cbar/candy.jpg') # Goes wrong at sharp edges
#img = Image.open('data/cbar/ceres_NASA_JPL-Caltech_UCLA_MPS_DLR_IDA.png') # Probably impossible
#img = Image.open('data/cbar/Gram_Ganssle.jpg') # Surprisingly bad — because JPEG?
img = Image.open('data/cbar/fft_basics_crop.png') #
#img = Image.open('data/cbar/uruguay_seismic.jpg') #
#img = Image.open('data/cbar/tle_cover.png') #
#img = Image.open('data/cbar/salt_minibasin.jpg') #
img
Out[10]:
In [11]:
img.size
Out[11]:
In [272]:
img = img.crop([0,200,1494,1014])
img
Out[272]:
In [13]:
#img = img.resize((20,20))
In [14]:
# p = np.asarray(img)[..., :3] / 255.
# h, w, d = p.shape
# p = p.reshape((w * h, d))
In [15]:
# p.shape
In [12]:
n_colours = 100
In [13]:
from sklearn.cluster import KMeans
from sklearn.utils import shuffle
In [14]:
im = np.asarray(img)[..., :3] / 255.
h, w, d = im.shape
im_ = im.reshape((w * h, d))
# Define training set.
n = min(h*w//50, n_colours*10)
sample = shuffle(im_, random_state=0)[:n]
In [15]:
print("Sampled {} pixels".format(n))
Train:
In [16]:
kmeans = KMeans(n_clusters=n_colours).fit(sample)
Now I can make an RGB palette p — also known as a codebook in information theory terms:
In [19]:
p = kmeans.cluster_centers_
# I don't know why I need to do this, but I do. Floating point precision maybe.
p[p > 1] = 1
p[p < 0] = 0
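# (The cluster centres are means of in-range samples, so any overshoot should
# only be floating-point round-off; np.clip(p, 0, 1) would do the same job.)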
In [20]:
from mpl_toolkits.mplot3d import Axes3D
# Set up the figure
fig = plt.figure(figsize=(8, 8))
# Codebook points, not yet ordered
ax = fig.add_subplot(111, projection='3d')
ax.scatter(*p.T, c=p, lw=0, s=40, alpha=1)
ax.plot(*p.T, color='k', alpha=0.4)
ax.set_title('Codebook')
plt.show()
In [21]:
from sklearn.neighbors import BallTree
def mask_colours(a, tree, colours, tolerance=1e-3, leave=0):
    """Drop colours within `tolerance` of the given colours, keeping the first `leave` matches."""
    target = tree.query_radius(colours, tolerance)
    mask = np.ones(a.shape[0], dtype=bool)
    end = None if leave < 2 else 1 - leave
    for t in target:
        mask[t[leave:end]] = False
    return a[mask]

def remove_duplicates(a, tree, tolerance=1e-3):
    """Collapse near-duplicate colours, keeping one of each."""
    for c in a:
        a = mask_colours(a, tree, [c], tolerance=tolerance, leave=1)
    return a
In [22]:
p.shape
Out[22]:
In [23]:
tree = BallTree(p)
In [24]:
newp = remove_duplicates(p, tree)
In [25]:
newp.shape
Out[25]:
In [26]:
tree = BallTree(newp)
In [27]:
def remove_isolates(a, tree):
    """Drop colours with few neighbours within ~3.5x the median nearest-neighbour distance."""
    d, i = tree.query(a, 2)
    tol = np.median(d[:, 1]) * 3.5
    i = tree.query_radius(a, tol)
    indices_of_isolates = [j for j, k in enumerate(i) if k.size < 7]
    return np.delete(a, indices_of_isolates, axis=0)
In [28]:
newp = remove_isolates(newp, tree)
In [29]:
newp.shape
Out[29]:
In [30]:
#newp = np.vstack([[[0, 0, 0]], newp])
newp = np.vstack([[[0, 0, 0]], newp])
In [31]:
from mpl_toolkits.mplot3d import Axes3D
# Set up the figure
fig = plt.figure(figsize=(8, 8))
# Cleaned-up codebook, still unordered
ax = fig.add_subplot(111, projection='3d')
ax.scatter(*newp.T, c=newp, lw=0, s=40, alpha=1)
ax.plot(*newp.T, color='k', alpha=0.4)
ax.set_title('Codebook')
plt.show()
In [297]:
from pytsp import run, dumps_matrix
We need to start the traversal of the locus somewhere.
Possible heuristics:
Blending the first two ideas, I am now starting at (0.25, 0, 0.5). It's the cool-point. (Only 0.25 red because if it's a toss-up between blue and red, I want blue.)
If I include the third idea — choose the dark-bluest end-point candidate — I think it should do better than it is now.
Aaaanyway, adding this point allows us to start the TSP there, because it will move to the next nearest point — I think. We will remove it later.
Add cool-point to p:
In [298]:
newp[:6]
Out[298]:
Remember that these points are essentially in random order. We are going to find the shortest Hamiltonian path through them to organize them.
To do this, we need weights for the edges — and we'll use the distances between the points. There's a convenient function, scipy.spatial.pdist, for finding distances in n-space. We will use the 2-norm, but the pdist function has a great many other metrics. The convenience function squareform organizes the distances into a matrix.
In [299]:
from scipy.spatial.distance import pdist, squareform
# Make distance matrix.
dists = squareform(pdist(newp, 'euclidean'))
# The values in `dists` are floats in the range 0 to sqrt(3).
# Normalize the values to int16s.
d = 32767 * dists / np.sqrt(3)
d = d.astype(np.int16)
# To use a TSP algo to solve the shortest Hamiltonian path problem,
# we need to add a point that is zero units from every other point.
row, col = dists.shape
d = np.insert(d, row, 0, axis=0)
d = np.insert(d, col, 0, axis=1)
The zero-point trick is legit. Reference from E. L. Lawler, Jan Karel Lenstra, A. H. G. Rinnooy Kan, D. B. Shmoys (1985). The Traveling Salesman Problem: A Guided Tour of Combinatorial Optimization, 1st Edition. Wiley. 476 pp. ISBN 978-0471904137.
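Here's a toy sketch of the trick with made-up distances (nothing to do with the colour data above). The dummy point is free to reach from anywhere, so the cheapest closed tour through the augmented matrix traces the cheapest open path through the real points.

# Hypothetical 3-point distance matrix, just for illustration.
toy = np.array([[0, 5, 9],
                [5, 0, 4],
                [9, 4, 0]])

# Append a dummy point at distance 0 from every real point.
aug = np.pad(toy, ((0, 1), (0, 1)), mode='constant', constant_values=0)

# The best tour on `aug` is 0 -> 1 -> 2 -> dummy -> 0, costing 5 + 4 = 9,
# which is exactly the shortest Hamiltonian path 0 -> 1 -> 2 on `toy`.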
In [300]:
d
Out[300]:
LKH implementation.
K. Helsgaun (2009). General k-opt submoves for the Lin-Kernighan TSP heuristic. Mathematical Programming Computation, 2009, doi: 10.1007/s12532-009-0004-6.
In [301]:
outf = "/tmp/myroute_lkh.tsp"
with open(outf, 'w') as f:
    f.write(dumps_matrix(d, name="My Route"))
In [302]:
tour_lkh = run(outf, start=0, solver="LKH")
In [303]:
#result = np.array(tour_concorde['tour'])
result = np.array(tour_lkh['tour'])
In [304]:
result
Out[304]:
In [305]:
result.size # Should be n_colours + 2
Out[305]:
In [306]:
# e = np.asscalar(np.where(result == result.size-1)[0])
# if e == 1:
# # Then it's second and I think I know why.
# # As long as it went to the last point next, and I think
# # it necessarily does, then we're good.
# print("Zero-point is second. Probably dealt with it.")
# result = np.concatenate([result[:e], result[e+1::][::-1]])
# elif e == len(result)-1:
# # Then it's at the end already.
# print("Zero-point is at the end. Dealt with it.")
# result = result[:-1]
# else:
# # I'm not sure why this would happen... but I Think in this
# # case we can just skip it.
# print("Zero-point is somewhere weird. Maybe dealt with... BE CAREFUL.")
# result = result[result != result.size-1]
# assert len(result) == len(p)
In [307]:
#result = result[result != 64]
In [308]:
result
Out[308]:
Now result is the indices of points for the shortest path, shape (256,). And p is our quantized colourmap, shape (256, 3). So we can select the points easily for an ordered colourmap.
The offsets are to account for the fact that we added a dark-blue point at the start and a zero point at the end.
In [309]:
c = newp[result[1:-2]]
In [310]:
result.shape
Out[310]:
Ideally I'd like all the distances too, but it wouldn't be too hard to compute these.
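They're easy enough to get from c directly; a minimal sketch, assuming the ordered array c from the cell above:

seg = np.linalg.norm(np.diff(c, axis=0), axis=1)   # length of each step along the locus
cum = np.concatenate([[0], np.cumsum(seg)])        # cumulative distance at each colour
print("Total locus length: {:.3f}".format(cum[-1]))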
Now let's look at it all.
In [311]:
from mpl_toolkits.mplot3d import Axes3D
# Set up the figure
fig = plt.figure(figsize=(8, 8))
# Result of TSP solver
ax = fig.add_subplot(111, projection='3d')
ax.scatter(*c.T, c=c, lw=0, s=40, alpha=1)
ax.plot(*c.T, color='k', alpha=0.4)
ax.text(*c[0], ' start')
ax.text(*c[-1], ' end')
ax.set_title('TSP solver')
plt.show()
Below is an interactive version of the 3D plot. It may help when there are complicated paths between points. You need to install plotly and colorlover (with pip) if you don't already have them.
In [312]:
import plotly.graph_objs as go
import colorlover as cl
from plotly.offline import download_plotlyjs, init_notebook_mode, plot, iplot

init_notebook_mode(connected=True)

cb = cl.to_rgb(tuple(map(tuple, c*255)))

trace = go.Scatter3d(
    name='TSP solver',
    x=c[:, 0], y=c[:, 1], z=c[:, 2],
    marker=dict(
        size=4.,
        color=cb,
    ),
    line=dict(
        color='#000',
        width=1,
    ),
)

data = [trace]

# Set the different layout properties of the figure:
layout = go.Layout(
    autosize=False,
    width=600,
    height=600,
    margin=dict(
        t=0, b=0, l=0, r=0,
    ),
    scene=go.Scene(
        xaxis=dict(
            title='red',
            gridcolor='rgb(255, 255, 255)',
            zerolinecolor='rgb(255, 0, 0)',
            showbackground=True,
            backgroundcolor='rgb(230, 230, 230)',
        ),
        yaxis=dict(
            title='green',
            gridcolor='rgb(255, 255, 255)',
            zerolinecolor='rgb(0, 255, 0)',
            showbackground=True,
            backgroundcolor='rgb(230, 230, 230)',
        ),
        zaxis=dict(
            title='blue',
            gridcolor='rgb(255, 255, 255)',
            zerolinecolor='rgb(0, 0, 255)',
            showbackground=True,
            backgroundcolor='rgb(230, 230, 230)',
        ),
        aspectmode='cube',
        camera=dict(
            eye=dict(
                x=1.7,
                y=-1.7,
                z=1,
            ),
        ),
    ),
)

fig = go.Figure(data=data, layout=layout)
iplot(fig, show_link=False)
In [313]:
# np.save('/home/matt/Dropbox/Public/raw_data.npy', p[1:])
# np.save('/home/matt/Dropbox/Public/ordered_data.npy', c)
In [314]:
from scipy.spatial import cKDTree
kdtree = cKDTree(c)
In [315]:
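# Map every pixel to its nearest colour on the recovered locus: ix is the
# index along the locus (i.e. the recovered data value), dx is the distance
# from the pixel's colour to that nearest locus colour.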
dx, ix = kdtree.query(im)
In [316]:
ix = ix.astype(float)
ix[dx>0.15] = np.nan # Remove anything that traveled too far
In [317]:
np.sqrt(3)/8
Out[317]:
In [318]:
plt.figure(figsize=(16,10))
plt.imshow(ix, cmap='gray')
plt.colorbar(shrink=0.6)
plt.show()
In [319]:
plt.imshow(dx, cmap='gray')
plt.colorbar()
plt.show()
In [320]:
fig = plt.figure(figsize=(18, 5))
ax0 = fig.add_subplot(131)
plt.imshow(im, interpolation='none')
ax0.set_title("Starting image")
ax1 = fig.add_subplot(132, projection='3d')
ax1.scatter(*c.T, c=c, lw=0, s=40, alpha=1)
ax1.plot(*c.T, color='k', alpha=0.5)
ax1.text(*c[0], ' start')
ax1.text(*c[-1], ' end')
ax1.set_title("Recovered cmap locus")
ax2 = fig.add_subplot(133)
plt.imshow(ix, cmap='jet', interpolation='none')
plt.colorbar(shrink=0.75)
ax2.set_title("Recovered data with known cmap")
plt.show()
In [222]:
# setting up color arrays
r1 = np.array(c)[:, 0] # value of Red for the nth sample
g1 = np.array(c)[:, 1] # value of Green for the nth sample
b1 = np.array(c)[:, 2] # value of Blue for the nth sample
r2 = r1 # value of Red at the nth sample
r0 = np.linspace(0, 1, len(r1)) # position of the nth Red sample within the range 0 to 1
g2 = g1 # value of Green at the nth sample
g0 = np.linspace(0, 1, len(g1)) # position of the nth Green sample within the range 0 to 1
b2 = b1 # value of Blue at the nth sample
b0 = np.linspace(0, 1, len(b1)) # position of the nth Blue sample within the range 0 to 1
# creating lists
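# Each channel entry for LinearSegmentedColormap is a sequence of (x, y0, y1)
# tuples: x is the sample's position in [0, 1], and y0 == y1 here because we
# don't need any discontinuities.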
R = zip(r0, r1, r2)
G = zip(g0, g1, g2)
B = zip(b0, b1, b2)
# creating list of above lists and transposing
RGB = zip(R, G, B)
rgb = zip(*RGB)
# print(rgb)
# creating dictionary
k = ['red', 'green', 'blue'] # makes list of keys
data_dict = dict(zip(k,rgb)) # makes a dictionary from list of keys and list of values
# Make a colourbar
import matplotlib.colors as clr
found_cmap = clr.LinearSegmentedColormap('my_colourmap', data_dict)
If we render the recovered data with the recovered colourmap, it will look perfect, because the colourmap is the codebook that maps the data back to the original image.
In [223]:
fig = plt.figure(figsize=(18, 5))
ax0 = fig.add_subplot(121)
plt.imshow(ix, cmap=found_cmap, interpolation='none')
plt.colorbar(shrink=0.75)
ax0.set_title("Recovered data with recovered cmap")
ax1 = fig.add_subplot(122)
plt.imshow(im, cmap=found_cmap, interpolation='none')
plt.colorbar(shrink=0.75)
ax1.set_title("Original image with recovered cmap")
plt.show()
In [224]:
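# Scale the recovered integer indices to [0, 1] so we can compare them with the original data.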
z_out = ix.astype(float)
z_out /= np.amax(z_out)
In [225]:
z.shape, z_out.shape
Out[225]:
The original data, z, is a different shape. At the risk of distorting it, let's resize it.
In [226]:
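# Note: scipy.misc.imresize was removed in SciPy 1.3; PIL's Image.resize or
# skimage.transform.resize are the modern alternatives.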
from scipy.misc import imresize
z_ = imresize(z, z_out.shape) / 255
In [227]:
diff = z_ - z_out
In [228]:
def rms(a):
    return np.sqrt(np.sum(a**2) / a.size)

print("RMS difference: {:.4f}".format(rms(diff)))
In [324]:
fig = plt.figure(figsize=(18, 5))
ax1 = fig.add_subplot(131)
plt.imshow(z_, cmap=cmap)
plt.colorbar(shrink=0.8)
ax1.set_title("Original data with known cmap")
ax2 = fig.add_subplot(132)
plt.imshow(z_out, cmap=cmap, interpolation='none')
plt.colorbar(shrink=0.8)
ax2.set_title("Recovered data with known cmap")
ax3 = fig.add_subplot(133)
plt.imshow(abs(diff), interpolation='none', cmap='gray')
plt.colorbar(shrink=0.8)
ax3.set_title("abs(diff); RMS: {:.4f}".format(rms(diff)))
plt.show()
In [325]:
cd ~/Dropbox/dev/rainbow/notebooks/
In [326]:
from utils import image_to_data
In [327]:
import matplotlib.pyplot as plt
%matplotlib inline
from PIL import Image
# img = Image.open('data/cbar/boxer.png')
# img = Image.open('data/cbar/fluid.png')
# img = Image.open('data/cbar/lisa.png')
# img = Image.open('data/cbar/redblu.png')
# img = Image.open('data/cbar/seismic.png')
# img = Image.open('data/cbar/drainage.jpg')
img = Image.open('data/cbar/test.png')
img
Out[327]:
In [328]:
data = image_to_data(img)
plt.imshow(data)
plt.show()
In [329]:
Image.fromarray(np.uint8(data*255))
Out[329]: