In [1]:
from __future__ import division, print_function,absolute_import
import pylab as plt
import amitgroup.plot as gr
import numpy as np
import amitgroup as ag
import os
import pnet
import matplotlib.pylab as plot
from pnet.cyfuncs import index_map_pooling
from queue import Queue
def extract(ims, allLayers):
    """Run `ims` through each layer's extract() in sequence.

    Parameters
    ----------
    ims : batch accepted by the first layer's ``extract`` method.
    allLayers : iterable of layer objects, each exposing ``extract(x)``.

    Returns
    -------
    The output of the final layer, or ``ims`` unchanged when
    ``allLayers`` is empty.
    """
    out = ims
    for stage in allLayers:
        out = stage.extract(out)
    return out

def partsPool(originalPartsRegion, numParts):
    """Pool a 2-D region of part indices into a binary presence grid.

    Parameters
    ----------
    originalPartsRegion : 2-D integer array of first-layer part indices;
        -1 marks locations where no part was coded.
    numParts : size of the part axis of the returned grid.

    Returns
    -------
    numpy array of shape (1, 1, numParts) holding 1.0 at every part
    index that occurs in the region and 0.0 elsewhere.
    """
    partsGrid = np.zeros((1, 1, numParts))
    region = np.asarray(originalPartsRegion)
    # Vectorized replacement for the original i/j double loop: select the
    # coded entries (!= -1) and mark each occurring part index once.
    coded = region[region != -1]
    partsGrid[0, 0, coded] = 1
    return partsGrid



def test(ims,labels,net):
    yhat = net.classify((ims,1000))
    return yhat == labels


/var/tmp/.local/lib/python3.4/site-packages/matplotlib/__init__.py:1157: UserWarning:  This call to matplotlib.use() has no effect
because the backend has already been chosen;
matplotlib.use() must be called *before* pylab, matplotlib.pyplot,
or matplotlib.backends is imported for the first time.

  warnings.warn(_use_error_msg)

In [2]:
#def trainPOP():
if pnet.parallel.main(__name__):
    # Load a previously trained parts model (a dict stored via np.save)
    # and rebuild the parts network from it.
    #X = np.load("testMay151.npy")
    X = np.load("_3_100*6*6_1000*1*1_Jun_16.npy")
    model = X.item()
    # get num of Parts
    numParts = model['layers'][1]['num_parts']
    net = pnet.PartsNet.load_from_dict(model)
    allLayer = net.layers
    ims,labels = ag.io.load_mnist('training')
    trainingDataNum = 1000
    # Code the first 1000 training images through the first two layers;
    # the result is an index map of first-layer part indices per location
    # (printed shape below shows (1000, 23, 23, 1)).
    extractedFeature = extract(ims[0:trainingDataNum],allLayer[0:2])[0]
    print(extractedFeature.shape)
    extractedFeature = extractedFeature.reshape(extractedFeature.shape[0:3])
    # Running sums used to visualize each part as the mean of the 6x6
    # image patches coded to it.
    partsPlot = np.zeros((numParts,6,6))
    partsCodedNumber = np.zeros(numParts)
    
    # Per-part collections filled in the second-layer pass below:
    # 12x12 image patches and pooled 7x7 part neighborhoods.
    imgRegion= [[] for x in range(numParts)]
    partsRegion = [[] for x in range(numParts)]

    # Accumulate, at every coded (m, n) location, the underlying 6x6
    # image patch under the part that fired there (-1 = nothing coded).
    for i in range(trainingDataNum):
        codeParts = extractedFeature[i]
        for m in range(23):
            for n in range(23):
                if(codeParts[m,n]!=-1):
                    partsPlot[codeParts[m,n]]+=ims[i,m:m+6,n:n+6]
                    partsCodedNumber[codeParts[m,n]]+=1
    # Average the accumulated patches. NOTE(review): a part that was never
    # coded divides by zero here (NaN) — visualization only, so tolerated.
    for j in range(numParts):
        partsPlot[j] = partsPlot[j]/partsCodedNumber[j]

    secondLayerCodedNumber = 0
    # Collect training samples for the second layer: for each interior
    # location (3..19, so the 12x12 window stays inside the image) that
    # coded a part, store the surrounding image patch and the pooled
    # binary part-presence grid of its 7x7 neighborhood, bucketed by the
    # center part's index.
    if 1:
        for i in range(trainingDataNum):
            codeParts = extractedFeature[i]
            for m in range(23)[3:20]:
                for n in range(23)[3:20]:
                    if(codeParts[m,n]!=-1):
                        imgRegion[codeParts[m,n]].append(ims[i,m-3:m+9,n-3:n+9])
                        secondLayerCodedNumber+=1
                        partsGrid = partsPool(codeParts[m-3:m+4,n-3:n+4],numParts)
                        partsRegion[codeParts[m,n]].append(partsGrid)
    
        
    ##second-layer parts
    # One independent second-layer PartsLayer per first-layer part, each
    # learning numSecondLayerParts "super parts" from that part's samples.
    numSecondLayerParts = 10
    allPartsLayer = [[pnet.PartsLayer(numSecondLayerParts,(1,1),
                        settings=dict(outer_frame = 0, 
                        threshold = 5, 
                        sample_per_image = 1, 
                        max_samples=10000, 
                        min_prob = 0.005))] 
                        for i in range(numParts)]
    # Mean 12x12 image patch per (first-layer part, super part), plus the
    # count of patches averaged into each cell.
    allPartsLayerImg = np.zeros((numParts,numSecondLayerParts,12,12))
    allPartsLayerImgNumber = np.zeros((numParts,numSecondLayerParts))
    
    zeroParts = 0
    imgRegionPool = [[] for i in range(numParts * numSecondLayerParts)]
    for i in range(numParts):
        # Skip first-layer parts that never fired at an interior location.
        if(not partsRegion[i]):
            continue
        allPartsLayer[i][0].train_from_samples(np.array(partsRegion[i]),None)
        # Re-code the same samples with the freshly trained layer to find
        # which super part each sample belongs to.
        extractedFeaturePart = extract(np.array(partsRegion[i],dtype = np.uint8),allPartsLayer[i])[0]
        print(extractedFeaturePart.shape)
        for j in range(len(partsRegion[i])):
            if(extractedFeaturePart[j,0,0,0]!=-1):
                partIndex = extractedFeaturePart[j,0,0,0]
                allPartsLayerImg[i,partIndex]+=imgRegion[i][j]
                imgRegionPool[i * numSecondLayerParts + partIndex].append(imgRegion[i][j])
                allPartsLayerImgNumber[i,partIndex]+=1
            else:
                # Sample that the trained layer declined to code.
                zeroParts+=1
    # Turn the patch sums into means, guarding against empty cells.
    for i in range(numParts):
        for j in range(numSecondLayerParts):
            if(allPartsLayerImgNumber[i,j]):
                allPartsLayerImg[i,j] = allPartsLayerImg[i,j]/allPartsLayerImgNumber[i,j]
    
    """
    Visualize the SuperParts
    """
    settings = {'interpolation':'nearest','cmap':plot.cm.gray,}
    settings['vmin'] = 0
    settings['vmax'] = 1
    plotData = np.ones((14*100+2,14*(numSecondLayerParts + 1)+2))*0.8
    visualShiftParts = 0
    if 0:
        allPartsPlot = np.zeros((20,numSecondLayerParts + 1,12,12))
        gr.images(partsPlot.reshape(numParts,6,6),zero_to_one=False,vmin = 0, vmax = 1)
        allPartsPlot[:,0] = 0.5
        allPartsPlot[:,0,3:9,3:9] = partsPlot[20:40]
        allPartsPlot[:,1:,:,:] = allPartsLayerImg[20:40]
        gr.images(allPartsPlot.reshape(20 * (numSecondLayerParts + 1),12,12),zero_to_one=False, vmin = 0, vmax =1)
    elif 0:
        for i in range(numSecondLayerParts + 1):
            for j in range(100):
                if i == 0:
                    plotData[5 + j * 14:11 + j * 14, 5 + i * 14: 11 + i * 14] = partsPlot[j+visualShiftParts]
                else:
                    plotData[2 + j * 14:14 + j * 14,2 + i * 14: 14 + i * 14] = allPartsLayerImg[j+visualShiftParts,i-1]
        plot.figure(figsize=(10,40))
        plot.axis('off')
        plot.imshow(plotData, **settings)
        plot.savefig('test2.pdf',format='pdf',dpi=900)
    else:
        pass



    """
    Train A Class-Model Layer
    """
    
    digits = range(10)
    sup_ims = []
    sup_labels = []
    
    classificationTrainingNum = 100
    for d in digits:
        ims0 = ag.io.load_mnist('training', [d], selection = slice(classificationTrainingNum), return_labels = False)
        sup_ims.append(ims0)
        sup_labels.append(d * np.ones(len(ims0),dtype = np.int64))
    sup_ims = np.concatenate(sup_ims, axis = 0)
    sup_labels = np.concatenate(sup_labels,axis = 0)
    

    curX = extract(sup_ims,allLayer[0:2])[0]
    #print(curX.shape)
    curX = curX.reshape(curX.shape[0:3])
    secondLevelCurx = np.zeros((10 * classificationTrainingNum,17,17,1,1,numParts))
    secondLevelCurxCenter = np.zeros((10 * classificationTrainingNum,17,17))
    #for i in range(10 * classificationTrainingNum):
    #    codeParts = curX[i]
    for m in range(23)[3:20]:
        for n in range(23)[3:20]:
            secondLevelCurx[:,m-3,n-3] = index_map_pooling(curX[:,m-3:m+4,n-3:n+4],numParts,(7,7),(7,7))
            secondLevelCurxCenter[:,m-3,n-3] = curX[:,m,n]

    thirdLevelCurx = np.zeros((10 * classificationTrainingNum, 17,17))
    for i in range(int(10 * classificationTrainingNum)):
        for m in range(17):
            for n in range(17):
                if(secondLevelCurxCenter[i,m,n]!=-1):
                    firstLevelPartIndex = secondLevelCurxCenter[i,m,n]
                    #print(firstLevelPartIndex)
                    firstLevelPartIndex = int(firstLevelPartIndex)
                    extractedFeaturePart = extract(np.array(secondLevelCurx[i,m,n][np.newaxis,:],dtype = np.uint8),allPartsLayer[firstLevelPartIndex])[0]
                    #print("secondLayerExtraction")
                    #print(extractedFeaturePart.shape)
                    thirdLevelCurx[i,m,n] = int(numSecondLayerParts * firstLevelPartIndex + extractedFeaturePart)
                    #print(numSecondLayerParts,firstLevelPartIndex,extractedFeaturePart,thirdLevelCurx[i,m,n])
                else:
                    thirdLevelCurx[i,m,n] = -1
    
    print(thirdLevelCurx.shape)
    #return thirdLevelCurx,allPartsLayerImg 
    # Train the classifier on the combined third-level codes: 4x4 pooling
    # followed by a mixture classification layer (SVM alternative left
    # commented out).
    if 1:
        classificationLayers = [
                            pnet.PoolingLayer(shape = (4,4),strides = (4,4)),
                            pnet.MixtureClassificationLayer(n_components = 5, min_prob = 1e-7, block_size = 200)
                            #pnet.SVMClassificationLayer(C=1.0)
        ]
        classificationNet = pnet.PartsNet(classificationLayers)
        # Second tuple element is the total number of distinct codes
        # (numParts * numSecondLayerParts).
        classificationNet.train((np.array(thirdLevelCurx[:,:,:,np.newaxis],dtype = np.int64),int(numParts * numSecondLayerParts)),sup_labels[:])
        print("Training Success!!")    
    
    if 1:
        # --- Evaluation: code the full MNIST test set the same way ---
        testImg,testLabels = ag.io.load_mnist('testing')
        testingNum = testLabels.shape[0]
        # Fixed log text: this step extracts features from the *test*
        # images (the original message said "training extract").
        print("testing extract Begin")
        curTestX = extract(testImg, allLayer[0:2])[0]
        print("testing extract End")
        curTestX = curTestX.reshape(curTestX.shape[0:3])
        # Pooled 7x7 part-presence grids and center part indices per
        # interior location, mirroring the training-time coding.
        secondLevelCurTestX = np.zeros((testingNum, 17,17,1,1,numParts))
        secondLevelCurTestXCenter = np.zeros((testingNum, 17,17))
        
        import time
        start = time.time()
        #for i in range(testingNum):
        #    codeParts = curTestX[i]
        for m in range(23)[3:20]:
            for n in range(23)[3:20]:
                secondLevelCurTestX[:,m-3,n-3] = index_map_pooling(curTestX[:,m-3:m+4,n-3:n+4],numParts,(7,7),(7,7))
                secondLevelCurTestXCenter[:,m-3,n-3] = curTestX[:,m,n]
        afterPool = time.time()
        # Wall-clock time spent pooling.
        print(afterPool - start)
        thirdLevelCurTestX = np.zeros((testingNum, 17, 17))
        # Group all pooled grids by their center part so each per-part
        # second layer can extract its samples in one batched call
        # (much faster than the per-location calls used at training time).
        featureMap = [[] for i in range(numParts)]
        for i in range(testingNum):
            for m in range(17):
                for n in range(17):
                    if(secondLevelCurTestXCenter[i,m,n]!=-1):
                        firstLevelPartIndex = int(secondLevelCurTestXCenter[i,m,n])
                        featureMap[firstLevelPartIndex].append(np.array(secondLevelCurTestX[i,m,n],dtype = np.uint8))
                        #extractedFeaturePart = extract(np.array(secondLevelCurTestX[i,m,n][np.newaxis,:],dtype = np.uint8),allPartsLayer[firstLevelPartIndex])[0]
                        #thirdLevelCurTestX[i,m,n] = numSecondLayerParts * firstLevelPartIndex + extractedFeaturePart
                    #else:
                        #thirdLevelCurTestX[i,m,n] = -1
        # FIFO queues preserve the scan order, so the second pass below can
        # pop results in exactly the order samples were appended above.
        extractedFeatureMap = [Queue() for i in range(numParts)]
        for i in range(numParts):
            partFeatureMap = np.array(featureMap[i],dtype = np.uint8)
            allExtractedFeature = extract(np.array(partFeatureMap),allPartsLayer[i])[0]
            for feature in allExtractedFeature:
                extractedFeatureMap[i].put(feature)
        
        # Second pass in the same scan order: pop each location's extracted
        # super-part code and build the flat third-level index.
        for i in range(testingNum):
            for m in range(17):
                for n in range(17):
                    if(secondLevelCurTestXCenter[i,m,n]!=-1):
                        firstLevelPartIndex = int(secondLevelCurTestXCenter[i,m,n])
                        # Queue exhausted would mean the two passes went out
                        # of sync — flag it and fall back to "uncoded".
                        if(extractedFeatureMap[firstLevelPartIndex].qsize()==0):
                            print("something is wrong")
                            extractedFeaturePart = -1
                        else:
                            extractedFeaturePart = extractedFeatureMap[firstLevelPartIndex].get()
                        thirdLevelCurTestX[i,m,n] = numSecondLayerParts * firstLevelPartIndex + extractedFeaturePart
                    else:
                        thirdLevelCurTestX[i,m,n] = -1
        end = time.time()
        # Wall-clock time of the batched second-layer coding.
        print(end-afterPool)
        print(thirdLevelCurTestX.shape)
        # Split into 200 batches so classification can run in parallel.
        testImg_Input = np.array(thirdLevelCurTestX[:,:,:,np.newaxis],dtype = np.int64) 
        testImg_batches = np.array_split(testImg_Input,200)
        testLabels_batches = np.array_split(testLabels, 200)
        
        args = [tup + (classificationNet,) for tup in zip(testImg_batches,testLabels_batches)]
        
        corrects = 0
        total = 0
        
        def format_error_rate(pr):
            # pr is the accuracy; render the complementary error rate.
            return "{:.2f}%".format(100 * (1-pr))
        print("Testing Starting...")
        for i, res in enumerate(pnet.parallel.starmap_unordered(test,args)):
            if i !=0 and i % 20 ==0:
                print("{0:05}/{1:05} Error rate: {2}".format(total, len(ims),format_error_rate(pr)))

            corrects += res.sum()
            total += res.size

            pr = corrects / total
        
        print("Final error rate:", format_error_rate(pr))


(1000, 23, 23, 1)
(4572, 1, 1, 1)
(4444, 1, 1, 1)
(4162, 1, 1, 1)
(3392, 1, 1, 1)
(4112, 1, 1, 1)
(2670, 1, 1, 1)
(3670, 1, 1, 1)
(2312, 1, 1, 1)
(2548, 1, 1, 1)
(1608, 1, 1, 1)
(2969, 1, 1, 1)
(3577, 1, 1, 1)
(2205, 1, 1, 1)
(1775, 1, 1, 1)
(1723, 1, 1, 1)
(3668, 1, 1, 1)
(2158, 1, 1, 1)
(2378, 1, 1, 1)
(1752, 1, 1, 1)
(710, 1, 1, 1)
(2021, 1, 1, 1)
(1516, 1, 1, 1)
(3200, 1, 1, 1)
(1414, 1, 1, 1)
(2883, 1, 1, 1)
(2832, 1, 1, 1)
(1544, 1, 1, 1)
(2662, 1, 1, 1)
(1516, 1, 1, 1)
(1573, 1, 1, 1)
(836, 1, 1, 1)
(2200, 1, 1, 1)
(1427, 1, 1, 1)
(2186, 1, 1, 1)
(1415, 1, 1, 1)
(2889, 1, 1, 1)
(1776, 1, 1, 1)
(1452, 1, 1, 1)
(1165, 1, 1, 1)
(1653, 1, 1, 1)
(1629, 1, 1, 1)
(2078, 1, 1, 1)
(2482, 1, 1, 1)
(526, 1, 1, 1)
(1640, 1, 1, 1)
(2131, 1, 1, 1)
(1057, 1, 1, 1)
(1637, 1, 1, 1)
(1886, 1, 1, 1)
(1612, 1, 1, 1)
(1493, 1, 1, 1)
(1587, 1, 1, 1)
(1760, 1, 1, 1)
(1672, 1, 1, 1)
(1746, 1, 1, 1)
(1421, 1, 1, 1)
(2273, 1, 1, 1)
(1659, 1, 1, 1)
(1979, 1, 1, 1)
(1318, 1, 1, 1)
(2118, 1, 1, 1)
(1518, 1, 1, 1)
(2548, 1, 1, 1)
(1470, 1, 1, 1)
(2179, 1, 1, 1)
(1847, 1, 1, 1)
(1679, 1, 1, 1)
(1784, 1, 1, 1)
(2229, 1, 1, 1)
(1578, 1, 1, 1)
(1673, 1, 1, 1)
(2257, 1, 1, 1)
(1814, 1, 1, 1)
(1815, 1, 1, 1)
(1089, 1, 1, 1)
(1923, 1, 1, 1)
(2176, 1, 1, 1)
(1564, 1, 1, 1)
(1441, 1, 1, 1)
(1511, 1, 1, 1)
(1455, 1, 1, 1)
(1646, 1, 1, 1)
(1468, 1, 1, 1)
(1316, 1, 1, 1)
(1776, 1, 1, 1)
(1404, 1, 1, 1)
(1796, 1, 1, 1)
(2140, 1, 1, 1)
(1693, 1, 1, 1)
(1242, 1, 1, 1)
(1549, 1, 1, 1)
(1397, 1, 1, 1)
(1327, 1, 1, 1)
(1372, 1, 1, 1)
(1885, 1, 1, 1)
(1624, 1, 1, 1)
(1255, 1, 1, 1)
(1029, 1, 1, 1)
(1172, 1, 1, 1)
(903, 1, 1, 1)
(1000, 17, 17)
Training Success!!
training extract Begin
training extract End
1.511885643005371
38.589943647384644
(10000, 17, 17)
Testing Starting...
01000/60000 Error rate: 3.80%
02000/60000 Error rate: 4.50%
03000/60000 Error rate: 4.87%
04000/60000 Error rate: 4.97%
05000/60000 Error rate: 5.06%
06000/60000 Error rate: 4.47%
07000/60000 Error rate: 4.10%
08000/60000 Error rate: 3.73%
09000/60000 Error rate: 3.41%
Final error rate: 3.32%

In [2]:


In [6]:
# Inspect how many 12x12 patches were averaged into each cell of
# (first-layer part, super part); relies on kernel state from the
# training cell above.
print(allPartsLayerImgNumber)


[[ 785.  767.  463.  486.  221.  350.  441.  475.  419.  160.]
 [ 759.  365.  355.  569.  719.  506.  312.  318.  242.  297.]
 [ 895.  492.  566.  543.  254.  358.  303.  271.  338.  142.]
 [ 344.  417.  420.  336.  623.  240.  282.  182.  214.  330.]
 [ 762.  500.  459.  361.  411.  239.  449.  394.  181.  355.]
 [ 197.  419.  430.  274.  259.  368.  242.  181.  176.  119.]
 [ 721.  337.  390.  311.  256.  306.  332.  396.  420.  201.]
 [ 330.  249.  187.  252.  304.  280.  185.  252.  130.  143.]
 [ 495.  305.  161.  260.  186.  166.  312.  241.  181.  241.]
 [ 241.  229.  263.  139.  218.   52.   69.  119.  169.   86.]
 [ 708.  322.  414.  267.  200.  278.  155.  197.  191.  237.]
 [ 809.  295.  418.  343.  246.  264.  189.  353.  503.  157.]
 [ 287.  412.  147.  208.  230.  214.  163.  176.  162.  205.]
 [ 346.  167.  155.  235.  165.   95.  180.  174.  134.  124.]
 [ 317.  191.  223.  160.  165.  113.   75.  125.  207.  147.]
 [ 366.  338.  454.  481.  386.  411.  466.  165.  261.  329.]
 [ 474.  171.  321.  282.  290.  183.  142.  152.   26.  109.]
 [ 332.  305.  235.  244.  236.  223.  251.  122.  231.  199.]
 [ 237.  301.  136.  123.  254.  137.  106.  107.  236.  109.]
 [ 152.  109.   72.   58.   31.   82.   92.   25.   34.   55.]
 [ 298.  319.  191.  292.  206.  157.  128.  197.   91.  125.]
 [ 216.  148.  276.  167.  145.  156.   83.  106.  121.   98.]
 [ 451.  435.  344.  399.  300.  294.  172.  305.  264.  236.]
 [ 120.  250.  160.  124.  169.   54.  143.  162.  162.   70.]
 [ 402.  320.  320.  252.  301.  241.  337.  235.  323.  152.]
 [ 339.  403.  394.  320.  269.  216.  294.  235.  229.  133.]
 [ 337.  201.  110.  252.  143.  145.  114.  110.   62.   63.]
 [ 142.  306.  358.  331.  253.  289.  463.  203.  109.  208.]
 [ 211.  218.  187.  186.   86.  147.  140.  175.   94.   72.]
 [ 209.  198.  172.   89.  149.  185.  114.  215.  175.   66.]
 [  60.  125.   72.  119.   38.   78.   93.  155.   31.   65.]
 [ 221.  289.  183.  194.  236.  207.  170.  317.  257.  126.]
 [ 211.  276.  129.  182.  101.  162.   72.  141.  102.   46.]
 [ 371.  267.  199.  293.  254.  136.  264.  111.   80.  210.]
 [ 207.  295.  173.  126.   94.   98.  116.  107.  128.   71.]
 [ 498.  196.  274.  298.  531.  173.  266.  167.  222.  264.]
 [ 202.  268.  139.  253.  182.  175.  141.  120.  126.  170.]
 [ 255.  181.  172.   85.  154.  170.   78.  113.  109.  135.]
 [ 246.   99.  160.   73.   90.  105.  124.  100.   91.   77.]
 [ 160.  171.  239.  218.  176.  146.  165.  160.  117.  101.]
 [ 211.  260.  186.  135.  155.  104.  176.  145.  164.   93.]
 [ 251.  339.  337.  249.  183.   86.  186.  157.  161.  129.]
 [ 395.  276.  266.  379.   70.  260.  266.  142.  231.  197.]
 [  13.   62.   79.   90.   37.   57.   55.   28.   71.   34.]
 [ 214.  152.  109.  237.  178.  147.  214.  136.  147.  106.]
 [ 300.  267.  193.  261.  235.  204.  145.  178.  198.  150.]
 [ 101.  115.  155.   90.   43.   91.  170.   97.  101.   94.]
 [ 271.  244.  100.  133.  162.  136.  179.  123.  166.  115.]
 [ 155.  242.  276.  154.  159.  276.  171.  112.  198.  143.]
 [ 251.  295.  141.  112.  164.  188.  166.  114.   43.  138.]
 [ 270.  137.  198.  189.  203.  131.   59.   67.  125.  114.]
 [ 117.  192.  281.  153.  104.  126.  194.  153.  194.   73.]
 [ 140.  169.  229.  142.  224.  250.  254.  134.  142.   73.]
 [ 221.  110.  218.  103.  126.  188.  197.  166.  175.  168.]
 [ 230.  171.  186.  173.  246.  148.  123.  149.  146.  173.]
 [ 231.  190.  170.  206.  128.   92.   97.   90.   93.  124.]
 [ 236.  329.  189.  306.  152.  184.  195.  294.  210.  178.]
 [ 316.  207.  150.  185.  128.  132.  188.  155.  126.   72.]
 [ 195.  243.  269.  139.  154.  207.  199.  128.  341.  104.]
 [ 202.  147.  149.  140.  112.  151.  128.   86.  131.   72.]
 [ 271.  220.  237.  255.  163.  208.  109.  268.  214.  173.]
 [ 285.  104.  144.  257.  137.  209.   77.  116.   88.  101.]
 [ 364.  252.  234.  332.  337.  169.  137.  183.  198.  342.]
 [ 167.  141.  100.  193.  170.  120.  242.   89.  152.   96.]
 [ 364.  240.  258.  221.  207.  221.  130.  224.  146.  168.]
 [ 242.  279.  223.  318.  167.  124.  139.   87.  142.  126.]
 [ 209.  251.  203.  183.  121.  178.  142.   87.  177.  128.]
 [ 232.  212.  228.  196.  123.  120.  225.  137.  209.  102.]
 [ 394.  307.  213.  372.  247.  190.  210.  115.   74.  107.]
 [ 229.  120.  141.  161.  252.  239.  115.  149.  106.   66.]
 [ 248.  176.  122.  257.  210.  125.  175.  122.   89.  149.]
 [ 193.  223.  245.  196.  273.  291.  242.  253.  198.  143.]
 [ 217.  231.  192.  100.  224.  133.  254.  175.  196.   92.]
 [ 239.  145.  228.  241.  206.  185.  139.  175.  136.  121.]
 [ 113.  315.   86.   93.   68.   61.   72.  133.  104.   44.]
 [ 261.  169.  136.  217.  184.  162.  296.  157.  210.  131.]
 [ 369.  202.  210.  205.  141.  274.  116.  163.  176.  320.]
 [ 233.  241.  224.  159.   99.   83.  130.  149.  136.  110.]
 [ 156.  149.  149.  191.   94.  124.  153.  247.   76.  102.]
 [ 209.  185.  220.  118.  100.  111.  123.  187.  122.  136.]
 [ 177.  179.  153.  191.   85.  237.  117.  102.  109.  105.]
 [ 202.  165.  187.  144.  207.  204.  174.   86.  175.  102.]
 [ 201.  230.  139.  157.  205.  106.  137.  100.   98.   95.]
 [ 113.  136.  140.  150.  110.  117.  147.  121.  184.   98.]
 [ 324.  245.  127.  183.  108.  272.  120.   98.  249.   50.]
 [ 165.  180.  132.  120.  133.  130.  193.  117.   68.  166.]
 [ 271.  277.  139.   77.  295.  167.  181.  100.  206.   83.]
 [ 268.  214.  242.  218.  288.  173.  185.  173.  181.  198.]
 [ 132.  156.  214.  281.  140.  237.  112.  157.  128.  136.]
 [  69.  206.  135.  131.   91.   97.   64.  227.  114.  108.]
 [ 191.  167.   96.  123.  165.  167.   79.  228.  148.  185.]
 [ 117.  148.  127.  163.  171.  135.  110.  152.  109.  165.]
 [ 198.  104.   93.  149.  136.  199.  151.  119.   60.  118.]
 [ 183.  179.  250.  123.   79.  126.  157.  101.   98.   76.]
 [ 319.  159.  158.  174.  150.  251.   89.  157.  190.  238.]
 [ 144.  234.  139.  164.  235.  144.   62.  203.  199.  100.]
 [  62.  209.   85.   95.  132.  133.  150.  114.  148.  127.]
 [ 144.  140.  102.  110.   73.   79.  116.  105.   75.   85.]
 [ 167.  117.  135.  121.   74.  137.  123.  125.   93.   80.]
 [ 162.  126.   70.   78.   76.   81.   87.   76.   80.   67.]]

In [5]:
import pylab as plt
# Heat map of the per-cell patch counts; brighter rows are first-layer
# parts that were coded more often. Relies on kernel state from the
# training cell above.
plt.imshow(allPartsLayerImgNumber,interpolation='nearest')


Out[5]:
<matplotlib.image.AxesImage at 0x7f21b4245dd8>

In [ ]: