In [108]:
import sys
sys.path.append('../code/functions/')
import tiffIO as io

import math
import cv2
import time
import pickle

import numpy as np
import synapseLib as sl
import matplotlib.pyplot as plt

from skimage.exposure import equalize_adapthist
from scipy.ndimage.filters import convolve
from skimage.filters import sobel
from skimage.morphology import dilation
from scipy.spatial import KDTree
from random import randint

from plotly.offline import download_plotlyjs, init_notebook_mode, plot, iplot
import plotly.graph_objs as go
init_notebook_mode(connected=True)



In [2]:
data = np.array(io.loadTiff('../data/rr46b_s0_ch1.tif'))
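
A quick sanity check on the loaded volume (the exact shape and dtype depend on the TIFF stack on disk):

In [ ]:
print data.shape, data.dtype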

In [3]:
axons, _ = sl.extractAxons(np.stack([equalize_adapthist(elem) for elem in data]), percentile=50)


/usr/local/lib/python2.7/dist-packages/skimage/util/dtype.py:110: UserWarning: Possible precision loss when converting from float64 to uint16

In [18]:
plt.imshow(axons[15], cmap='gray')
plt.show()



In [76]:
img = data[15]
for kernelSize in [8]:
    img = convolve(img, np.ones((kernelSize, kernelSize)))

img = equalize_adapthist(img)
plt.imshow(img, cmap='gray')
plt.show()
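
cv2 is imported at the top of the notebook but unused; its box filter performs the same 8x8 smoothing in a single call. Note that cv2.blur normalizes by the kernel area, while the convolution above sums raw intensities, so this is only a hedged sketch of an equivalent preprocessing step, not a drop-in replacement:

In [ ]:
# normalized 8x8 box filter via OpenCV (data[15] is the same raw slice as above)
imgBox = cv2.blur(data[15], (8, 8))
plt.imshow(equalize_adapthist(imgBox), cmap='gray')
plt.show()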



In [61]:
axons2, _ = sl.extractAxons([img], percentile=50)

In [63]:
plt.imshow(axons2[0], cmap='gray')
plt.show()



In [4]:
def evolveAxons(data, epochs=2, n=2, neighborhood=16, dilations=5, percentile=50):
    """Iteratively re-extract axons: CLAHE -> extractAxons -> 8x8 box convolution,
    feeding each epoch's thickened result back in as the next epoch's input."""
    species = data
    kernel = np.ones((8, 8))
    for i in range(epochs):
        print i  # epoch counter
        # extract candidate axons from the contrast-equalized stack
        genus, _ = sl.extractAxons(np.stack([equalize_adapthist(elem) for elem in species]),
                                   percentile=percentile,
                                   neighborhood=neighborhood,
                                   n=n,
                                   dilations=dilations)

        # thicken the extracted axons with an 8x8 box sum before the next epoch
        species = np.stack([convolve(elem, kernel) for elem in genus]).astype(np.int64)

    return species
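
skimage's dilation is imported at the top of the notebook but unused; morphological dilation with the same 8x8 footprint is a hedged alternative to the ones-kernel convolution used as the feedback step above, in that it thickens the extracted axons without accumulating sums:

In [ ]:
# hedged variant of the evolveAxons feedback step, applied to the current extraction
thickened = np.stack([dilation(elem, np.ones((8, 8))) for elem in axons])
plt.imshow(thickened[15], cmap='gray')
plt.show()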

In [5]:
axons = evolveAxons(data[10:20], epochs=10, dilations=10)


0 ... 9   (epoch counter; repeated skimage dtype UserWarnings about float64-to-uint16 precision loss and int64-to-uint16 downcasting omitted)

In [138]:
plt.imshow(axons[5], cmap='gray')
plt.show()



In [141]:
kernelX = [[ 1, 0, -1],
           [ 2, 0, -2],
           [ 1, 0, -1]]

kernelY = [[ 1,  2,  1],
           [ 0,  0,  0],
           [-1, -2, -1]]

In [143]:
xGrad = convolve(axons[5], kernelX)
yGrad = convolve(axons[5], kernelY)
grad = np.sqrt(np.add(np.power(xGrad, 2), np.power(yGrad, 2)))
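
skimage.filters.sobel is already imported and yields the same gradient-magnitude image (up to a constant scale factor and border handling) in one call; a minimal cross-check sketch:

In [ ]:
gradAlt = sobel(axons[5].astype(np.float64))  # Sobel edge magnitude
plt.imshow(gradAlt, cmap='gray')
plt.show()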

In [144]:
plt.imshow(grad, cmap='gray')
plt.show()



In [161]:
kernel = np.ones((16, 16))

# hollow 16x16 ring: ones on the border, zeros everywhere inside
kernel[1:15, 1:15] = 0
# earlier experiment, left disabled: penalize the 2x2 center of the ring
# kernel[7, 7] = -6
# kernel[7, 8] = -6
# kernel[8, 7] = -6
# kernel[8, 8] = -6

In [162]:
nodes = convolve(grad, kernel)
plt.imshow(nodes, cmap='gray')
plt.show()



In [166]:
test = np.logical_and(np.logical_xor(nodes, grad), axons[5])
plt.imshow(test, cmap='gray')
plt.show()



In [8]:
def generateNodeImg(axonImg, step=64):

    # Sobel kernels for x and y gradients
    kernelX = [[ 1, 0, -1],
               [ 2, 0, -2],
               [ 1, 0, -1]]

    kernelY = [[ 1,  2,  1],
               [ 0,  0,  0],
               [-1, -2, -1]]

    # gradient magnitude of the input axon image
    xGrad = convolve(axonImg, kernelX)
    yGrad = convolve(axonImg, kernelY)
    grad = np.sqrt(np.add(np.power(xGrad, 2), np.power(yGrad, 2)))

    # hollow 16x16 ring with a negative 2x2 center: responds where edge energy
    # surrounds a point symmetrically
    symmetryKernel = np.ones((16, 16))
    symmetryKernel[1:15, 1:15] = 0
    symmetryKernel[7:9, 7:9] = -1

    potentialNodes = convolve(grad, symmetryKernel)

    # combine ring response, gradient, and axon mask; weight by axon intensity
    nodes = np.multiply(np.logical_and(np.logical_xor(potentialNodes, grad), axonImg), axonImg)

    # non-max suppression: keep the single strongest candidate per step x step block
    nonMaxSuppression = np.zeros_like(nodes)

    for y in range(0, 1024, step):
        for x in range(0, 1024, step):
            sub = nodes[y:y+step, x:x+step]

            aMax = np.argmax(sub)
            yMax = aMax // step
            xMax = aMax % step

            nonMaxSuppression[y+yMax, x+xMax] = sub[yMax, xMax]

    return nonMaxSuppression

In [94]:
nodeImg = generateNodeImg(axons[5], 64)
plt.imshow(nodeImg, cmap='gray')
plt.show()



In [95]:
plt.imshow(axons[5], cmap='gray')
plt.show()



In [96]:
print np.count_nonzero(nodeImg)
print len(zip(*(np.nonzero(nodeImg))))


176
176

In [97]:
axCp = axons[5].copy()
for node in zip(*(np.nonzero(nodeImg))):
    axCp[node[0]-5:node[0]+5, node[1]-5:node[1]+5] = 255

plt.imshow(axCp, cmap='gray')
plt.show()



In [104]:
def generateGraph(nodeImg, axons, thickness=10, meanThresh=50, devThresh=10):
    axCp = axons.copy()

    aves = []
    devs = []
    potEdges = []
    edges = []

    # every nonzero pixel in the node image is a graph node (y, x)
    nodes = zip(*(np.nonzero(nodeImg)))
    for i in range(len(nodes)):
        print i/float(len(nodes))  # progress
        for j in range(i+1, len(nodes)):

            y0, x0 = nodes[i]
            y1, x1 = nodes[j]

            # rasterize the straight line between the two nodes
            length = int(np.hypot(x1-x0, y1-y0))
            x, y = np.linspace(x0, x1, length).astype(int), np.linspace(y0, y1, length).astype(int)
            potEdges.append([y, x])

            # mean axon intensity in a window of half-width `thickness` around each line sample
            potEdgeStats = []
            for k in range(length):
                sub = axons[max(y[k]-thickness, 0):min(y[k]+thickness, 1024),
                            max(x[k]-thickness, 0):min(x[k]+thickness, 1024)]
                potEdgeStats.append(np.mean(sub))

            aves.append(np.mean(potEdgeStats))
            devs.append(np.std(potEdgeStats))

    # keep edges that are bright on average (above the meanThresh percentile) and
    # consistently bright along their length (below the devThresh percentile of std)
    meanCut = np.percentile(aves, meanThresh)
    devCut = np.percentile(devs, devThresh)

    for i in range(len(potEdges)):
        if aves[i] > meanCut and devs[i] < devCut:
            edges.append(potEdges[i])

    # burn the kept edges into a copy of the axon image for visualization
    for edge in edges:
        y = edge[0]
        x = edge[1]
        for k in range(len(y)):
            axCp[max(y[k]-3, 0):min(y[k]+3, 1024), max(x[k]-3, 0):min(x[k]+3, 1024)] = 255

    return nodes, edges, axCp
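
generateGraph scores every node pair, so the work grows quadratically with the node count; for the 176 nodes found above that is 176*175/2 = 15,400 candidate edges, which is why the KD-tree variant further below only scores each node against its nearest neighbors. A quick check:

In [ ]:
nNodes = np.count_nonzero(nodeImg)
print nNodes * (nNodes - 1) // 2  # candidate edges scored by generateGraph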

In [105]:
nodes, edges, vis = generateGraph(nodeImg, axons[5])


0.0 ... 0.994   (per-node progress printout omitted)

In [106]:
plt.imshow(vis, cmap='gray')
plt.show()



In [157]:
def estimateGraph(nodeImg, axons, thickness=10, neighbors=6, baselineSize=10):
    axCp = axons.copy()

    # estimate a baseline edge intensity from random line segments across the image
    baseline = []
    for i in range(baselineSize):
        y0, x0 = randint(0, axons.shape[0] - 1), randint(0, axons.shape[1] - 1)
        y1, x1 = randint(0, axons.shape[0] - 1), randint(0, axons.shape[1] - 1)

        length = int(np.hypot(x1-x0, y1-y0))
        x, y = np.linspace(x0, x1, length).astype(int), np.linspace(y0, y1, length).astype(int)

        edgeStats = []
        for k in range(length):
            sub = axons[max(y[k]-thickness, 0):min(y[k]+thickness, 1024),
                        max(x[k]-thickness, 0):min(x[k]+thickness, 1024)]
            edgeStats.append(np.mean(sub))

        baseline.append(np.mean(edgeStats))

    baseMu = np.mean(baseline)
    baseSig = np.std(baseline)

    edges = []

    # only score edges between each node and its k nearest neighbors
    nodes = zip(*(np.nonzero(nodeImg)))
    tree = KDTree(nodes)
    for curIdx, node in enumerate(nodes):
        print curIdx/float(len(nodes))  # progress
        partnerIdxList = tree.query(node, k=neighbors)[1]
        partners = []
        for partnerIdx in partnerIdxList:
            if partnerIdx > curIdx:  # avoid scoring each pair twice
                partners.append(nodes[partnerIdx])

        for partner in partners:
            y0, x0 = node
            y1, x1 = partner

            # rasterize the candidate edge and sample axon intensity along it
            length = int(np.hypot(x1-x0, y1-y0))
            x, y = np.linspace(x0, x1, length).astype(int), np.linspace(y0, y1, length).astype(int)

            edgeStats = []
            for k in range(length):
                sub = axons[max(y[k]-thickness, 0):min(y[k]+thickness, 1024),
                            max(x[k]-thickness, 0):min(x[k]+thickness, 1024)]
                edgeStats.append(np.mean(sub))

            # keep the edge if its mean intensity is well above the random baseline
            dp = np.mean(edgeStats)
            z = (dp - baseMu)/float(baseSig)
            if z > 1.5:
                edges.append([y, x])

    # burn the kept edges into a copy of the axon image for visualization
    for edge in edges:
        y = edge[0]
        x = edge[1]
        for k in range(len(y)):
            axCp[max(y[k]-3, 0):min(y[k]+3, 1024), max(x[k]-3, 0):min(x[k]+3, 1024)] = 255

    return nodes, edges, axCp

In [158]:
nodes, edges, vis = estimateGraph(nodeImg, axons[5])


0.0 ... 0.994   (per-node progress printout omitted)
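
plotly is initialized at the top of the notebook but never used; a minimal sketch for viewing the estimated graph interactively (node markers plus one line trace per kept edge), assuming the nodes and edges returned by estimateGraph in the cell above:

In [ ]:
traces = [go.Scatter(x=[n[1] for n in nodes], y=[n[0] for n in nodes],
                     mode='markers', name='nodes')]
for y, x in edges:
    traces.append(go.Scatter(x=list(x), y=list(y), mode='lines',
                             line=dict(width=1), showlegend=False))
# reverse the y-axis so the plot matches image coordinates
iplot(go.Figure(data=traces, layout=go.Layout(yaxis=dict(autorange='reversed'))))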

In [160]:
class anatomy(object):
    """Container for one slice's graph: node list, kept edges, and a visualization image."""
    def __init__(self, nodes, edges, vis):
        self._nodes = nodes
        self._edges = edges
        self._vis = vis

def generateAnatomyVolume(data):

    anatomyVolume = []

    # evolve the axon extraction, then build a graph estimate per slice
    axonImgs = evolveAxons(data, epochs=10, dilations=10)
    for axonImg in axonImgs:
        nodeImg = generateNodeImg(axonImg, 64)
        nodes, edges, vis = estimateGraph(nodeImg, axonImg)
        anatomyVolume.append(anatomy(nodes, edges, vis))

    return anatomyVolume

In [167]:
data2 = np.array(io.loadTiff('../data/rr46b_s1_ch1.tif')) 
start = time.time()
anatomyVol = generateAnatomyVolume(data2[10:15])


0 ... 9   (evolveAxons epoch counter)
0.0 ... 0.995   (estimateGraph per-node progress for each of the 5 slices, omitted)
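
pickle is imported at the top of the notebook but never used; a minimal sketch for persisting the computed anatomy volume (the output path below is only a placeholder):

In [ ]:
with open('../data/anatomyVolume.pkl', 'wb') as f:  # hypothetical output path
    pickle.dump(anatomyVol, f)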

In [181]:
plt.imshow(equalize_adapthist(data2[10])*1000, cmap='gray')
plt.show()



In [174]:
plt.imshow(anatomyVol[0]._vis, cmap='gray')
plt.show()



In [159]:
plt.imshow(vis, cmap='gray')
plt.show()



In [ ]: