In [139]:
# import modules
%matplotlib inline
import random
import pylab
import pandas as pd
import numpy as np
import cPickle as pkl
from PIL import Image
from lasagne import layers, updates
from theano.tensor.nnet import softmax
from nolearn.lasagne import NeuralNet, BatchIterator
from sklearn.cross_validation import train_test_split

In [63]:
# test image
from scipy.misc import imread
img = imread('/home/faizy/workspace/project/project/datasets/svt/svt1/img/00_13.jpg')
# to crop a region, slice as img[y:y + height, x:x + width, :]
# (the slice [292:450, 176:850, :] was used earlier for image 00_12)
print img.shape
pylab.imshow(img)
pylab.show()


(768, 1024, 3)

In [11]:
# extra functions
def unpickle(filer):
    # load a pickled data batch from disk
    with open(filer, 'rb') as f:
        return pkl.load(f)

In [121]:
def shiftup(dataset, shift = 10):
    '''Shift every image up by `shift` pixels, replicating the bottom row.'''
    shifted_dataset = np.zeros(dataset.shape)
    shifted_dataset[:, :, :-shift, :] = dataset[:, :, shift:, :]
    shifted_dataset[:, :, -shift:, :] = dataset[:, :, -1:, :]
    return shifted_dataset

In [7]:
def shiftdown(dataset, shift = 10):
    '''Shift every image down by `shift` pixels, replicating the top row.'''
    shifted_dataset = np.zeros(dataset.shape)
    shifted_dataset[:, :, shift:, :] = dataset[:, :, :-shift, :]
    shifted_dataset[:, :, :shift, :] = dataset[:, :, :1, :]
    return shifted_dataset

In [62]:
def shiftleft(dataset, shift = 10):
    '''Shift every image left by `shift` pixels, replicating the rightmost column.'''
    shifted_dataset = np.zeros(dataset.shape)
    shifted_dataset[:, :, :, :-shift] = dataset[:, :, :, shift:]
    shifted_dataset[:, :, :, -shift:] = dataset[:, :, :, -1:]
    return shifted_dataset

In [89]:
def shiftright(dataset, shift = 10):
    '''Shift every image right by `shift` pixels, replicating the leftmost column.'''
    shifted_dataset = np.zeros(dataset.shape)
    shifted_dataset[:, :, :, shift:] = dataset[:, :, :, :-shift]
    shifted_dataset[:, :, :, :shift] = dataset[:, :, :, :1]
    return shifted_dataset
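
The four shift functions differ only in axis and direction, so a single generic helper could replace them. The sketch below (shift_image is hypothetical, not part of the original pipeline) rolls the array and then overwrites the wrapped-around band with the replicated edge:

In [ ]:
# sketch: one parameterised shift with edge replication
# axis = 2 shifts vertically, axis = 3 horizontally;
# positive shift moves content down/right, negative up/left
def shift_image(dataset, shift, axis):
    shifted = np.roll(dataset, shift, axis = axis)
    edge = [slice(None)] * dataset.ndim
    band = [slice(None)] * dataset.ndim
    if shift > 0:
        edge[axis] = slice(0, 1)        # first row/column of the original
        band[axis] = slice(0, shift)    # wrapped-around band to overwrite
    else:
        edge[axis] = slice(-1, None)    # last row/column of the original
        band[axis] = slice(shift, None)
    shifted[tuple(band)] = dataset[tuple(edge)]
    return shifted

# e.g. shiftdown(x) == shift_image(x, 10, axis = 2)
#      shiftleft(x) == shift_image(x, -10, axis = 3)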

In [12]:
# load train / test sets
# cifar (natural images: candidate non-text negatives, class 0)
train_dict = unpickle('/home/faizy/workspace/cifar/cifar-10-batches-py/data_batch_1')
train2_images = train_dict['data'].astype('float32')
train2_y = np.zeros((10000, )).astype('int')
test_dict = unpickle('/home/faizy/workspace/cifar/cifar-10-batches-py/test_batch')
test2_images = test_dict['data'].astype('float32')

# chars74k (character images: text positives, class 1)
data = pd.read_csv('/home/faizy/workspace/project/project/scripts/LISTFILE.txt', sep = ' ', header = None)
root = '/home/faizy/workspace/project/project/datasets/English/'
data_x = np.zeros((data.shape[0], 1, 32, 32))
data_y = np.ones((data.shape[0], )).astype('int32')
from scipy.misc import imread, imresize
for idx, path in enumerate(data[0]):
    img = imread(root + path)
    img = imresize(img, (32, 32))
    if len(img.shape) == 3:
        # ITU-R BT.601 luma weights
        data_x[idx, ...] = img.dot([0.299, 0.587, 0.114])
    else:
        data_x[idx, ...] = img
        
data_x = data_x.astype('float32')
train1_x, test1_x, train1_y, test1_y = train_test_split(data_x, data_y, test_size = 0.2)
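
The same luma conversion recurs for the CIFAR batches and for the sliding-window patches further down. A small helper would keep the coefficients in one place; a sketch (rgb2gray is not in the original notebook):

In [ ]:
# sketch: factor the repeated grayscale conversion into one helper
BT601 = np.array([0.299, 0.587, 0.114])   # ITU-R BT.601 luma weights

def rgb2gray(img):
    # collapse a (..., 3) RGB array to (...) luma
    return img.dot(BT601)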

In [122]:
# augmented data: off-centre (shifted) copies of the characters,
# labelled 0 (non-text) below, unlike the centred originals
train3_x = shiftup(train1_x)
train4_x = shiftdown(train1_x)
train5_x = shiftleft(train1_x)
train6_x = shiftright(train1_x)

train3_y = np.zeros((train3_x.shape[0], )).astype('int')
train4_y = np.zeros((train4_x.shape[0], )).astype('int')
train5_y = np.zeros((train5_x.shape[0], )).astype('int')
train6_y = np.zeros((train6_x.shape[0], )).astype('int')
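
With one positive set and four shifted negative sets of equal size, negatives outnumber positives roughly four to one. A quick check of the balance (a sketch, to be run once train_y is built a few cells below) helps explain the flat ~0.82 validation accuracy in the training log:

In [ ]:
# sketch: inspect the class balance of the combined labels
# (run after the concatenation cell below); expect ~4 negatives per positive
print np.bincount(train_y), np.bincount(train_y) / float(train_y.size)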

In [126]:
print train1_x.shape, train2_images.shape, train3_x.shape, train4_x.shape, train5_x.shape, train6_x.shape


(6164, 1, 32, 32) (10000, 3072) (6164, 1, 32, 32) (6164, 1, 32, 32) (6164, 1, 32, 32) (6164, 1, 32, 32)

In [127]:
# preprocess: scale each set to unit global std, then zero-centre
# cifar
train2_images /= train2_images.std(axis = None)
train2_images -= train2_images.mean()

test2_images /= test2_images.std(axis = None)
test2_images -= test2_images.mean()

# chars74k
train1_x /= train1_x.std(axis = None)
train1_x -= train1_x.mean()

test1_x /= test1_x.std(axis = None)
test1_x -= test1_x.mean()

# augmented data

train3_x /= train3_x.std(axis = None)
train3_x -= train3_x.mean()

train4_x /= train4_x.std(axis = None)
train4_x -= train4_x.mean()

train5_x /= train5_x.std(axis = None)
train5_x -= train5_x.mean()

train6_x /= train6_x.std(axis = None)
train6_x -= train6_x.mean()
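
Every array gets the same two-line treatment. Note that dividing by the global std first and centring second still yields zero-mean, unit-variance data, since scaling by a constant rescales the mean as well. A helper would cut the repetition; a sketch (standardize is not in the original):

In [ ]:
# sketch: one helper instead of eight copies of the same two lines
def standardize(x):
    # scale to unit global std, then zero-centre; returns a new array
    x = x / x.std(axis = None)
    return x - x.mean()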

In [128]:
# reshape dataset
# cifar
# unflatten to (N, 32, 32, 3), then grayscale with the BT.601 luma weights
train2_x_rgb = train2_images.reshape(-1, 3, 32, 32).transpose(0, 2, 3, 1)

train2_x = np.zeros((10000, 1, 32, 32))
for i in range(10000):
    train2_x[i, :, :, :] = np.dot(train2_x_rgb[i, :, :, :], [0.299, 0.587, 0.114])

test2_x_rgb = test2_images.reshape(-1, 3, 32, 32).transpose(0, 2, 3, 1)

test2_x = np.zeros((10000, 1, 32, 32))
for i in range(10000):
    test2_x[i, :, :, :] = np.dot(test2_x_rgb[i, :, :, :], [0.299, 0.587, 0.114])
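
The two per-image loops can be collapsed into a single dot product each; a vectorised equivalent (a sketch under the same array shapes):

In [ ]:
# sketch: vectorised equivalent of the grayscale loops above
train2_x = train2_x_rgb.dot([0.299, 0.587, 0.114])[:, np.newaxis, :, :]
test2_x = test2_x_rgb.dot([0.299, 0.587, 0.114])[:, np.newaxis, :, :]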

In [134]:
# finally, stack the positives with the four shifted negative sets
# (the CIFAR set, train2_x / train2_y / test2_x, is left out of this run)
train_x = np.vstack((train1_x, train3_x, train4_x, train5_x, train6_x))

train_y = np.concatenate([train1_y, train3_y, train4_y, train5_y, train6_y])

test_x = test1_x

test_y = test1_y

In [135]:
# cast to the dtypes the net expects (float32 inputs, int32 labels)
train_x = train_x.astype('float32')
train_y = train_y.astype('int32')
test_x = test_x.astype('float32')
test_y = test_y.astype('int32')

In [142]:
# define the detector: conv -> pool -> dropout -> dense -> 2-way softmax
net = NeuralNet(
    layers = [
        ('input', layers.InputLayer),
        ('conv1', layers.Conv2DLayer),
        ('pool1', layers.MaxPool2DLayer),
        ('dropout1', layers.DropoutLayer),
        ('hidden4', layers.DenseLayer),
        ('output', layers.DenseLayer),
    ],

    input_shape = (None, 1, 32, 32),
    conv1_num_filters = 32, conv1_filter_size = (10, 10),
    pool1_pool_size = (2, 2),
    dropout1_p = 0.2,
    hidden4_num_units = 128,
    output_num_units = 2, output_nonlinearity = softmax,

    batch_iterator_train = BatchIterator(batch_size = 1000),
    batch_iterator_test = BatchIterator(batch_size = 1000),

    update=updates.adam,

    use_label_encoder = True,
    regression = False,
    max_epochs = 500,
    verbose = 1,
)
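
The parameter count reported in the training output below follows directly from these settings; a sketch of the arithmetic (not a cell from the original run):

In [ ]:
# sketch: reproduce the reported 499234 learnable parameters
conv1 = (10 * 10 * 1 + 1) * 32         # 10x10 kernels on 1 channel, plus biases
hidden4 = (32 * 11 * 11) * 128 + 128   # flattened 32x11x11 pool output -> 128 units
output = 128 * 2 + 2                   # 128 units -> 2-way softmax
print conv1 + hidden4 + output         # 499234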

In [143]:
# train and test nn
net.fit(train_x, train_y)
pred = net.predict(test_x)


# Neural Network with 499234 learnable parameters

## Layer information

  #  name      size
---  --------  --------
  0  input     1x32x32
  1  conv1     32x23x23
  2  pool1     32x11x11
  3  dropout1  32x11x11
  4  hidden4   128
  5  output    2

  epoch    train loss    valid loss    train/val    valid acc  dur
-------  ------------  ------------  -----------  -----------  -----
      1       2.04943       0.68469      2.99323      0.82386  8.94s
      2       0.68016       0.67585      1.00637      0.82386  8.68s
      3       0.67224       0.66743      1.00722      0.82386  8.69s
      4       0.66452       0.65923      1.00803      0.82386  8.69s
      5       0.65704       0.65129      1.00883      0.82386  14.68s
      6       0.64983       0.64363      1.00962      0.82386  8.68s
      7       0.64290       0.63627      1.01041      0.82386  8.68s
      8       0.63626       0.62921      1.01120      0.82386  10.16s
      9       0.62990       0.62243      1.01200      0.82386  9.17s
     10       0.62381       0.61593      1.01280      0.82386  8.79s
    ...           (epochs 11-319 elided: both losses creep down to ~0.497 / ~0.467 while valid acc stays fixed at 0.82386)
    320       0.49717       0.46688      1.06488      0.82386  9.54s

In [17]:
# save the trained detector
with open('/home/faizy/workspace/project/project/models/detector_2.pkl', 'wb') as f:
    pkl.dump(net, f)

In [144]:
from sklearn.metrics import accuracy_score, classification_report
print classification_report(test_y, pred)


             precision    recall  f1-score   support

          0       0.00      0.00      0.00         0
          1       0.00      0.00      0.00      1541

avg / total       0.00      0.00      0.00      1541
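
The all-zero report matches the plateau in the training log: the net appears to predict class 0 for every test patch, while test1_y is all ones, so precision and recall vanish for both classes. The already-imported accuracy_score confirms this; a sketch (this cell is not from the original run):

In [ ]:
# sketch: quantify the failure; pred is all 0s while test_y is all 1s
print accuracy_score(test_y, pred)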


In [72]:
from sklearn.feature_extraction import image
patches = image.extract_patches(img, (100, 60, 3), extraction_step = 5)
print patches.shape


(134, 193, 1, 100, 60, 3)
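
The grid shape follows from the window and step sizes: for the 768x1024 image, a 100x60 window stepped by 5 gives (768 - 100)//5 + 1 = 134 vertical and (1024 - 60)//5 + 1 = 193 horizontal positions. A sketch of the check:

In [ ]:
# sketch: derive the patch-grid shape reported above
print (768 - 100) // 5 + 1, (1024 - 60) // 5 + 1   # 134 193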

In [73]:
from scipy.misc import imresize
new_lst = []
for i in range(patches.shape[0]):
    for j in range(patches.shape[1]):
        new_lst.append(imresize(patches[i, j, 0, :, :, :], (32, 32)))
        
print len(new_lst)


25862

In [74]:
new_list = np.stack(new_lst)
new_list = new_list.dot([0.299, 0.587, 0.114])  # BT.601 grayscale, as above
tester = new_list.reshape(patches.shape[0]*patches.shape[1], 1, 32, 32)

In [75]:
tester /= tester.std(axis = None)
tester -= tester.mean()
tester = tester.astype('float32')

In [76]:
print tester.shape


(25862, 1, 32, 32)

In [77]:
preder = net.predict_proba(tester)

In [78]:
heatmap = preder[:, 1].reshape((patches.shape[0], patches.shape[1]))
print heatmap.shape


(134, 193)

In [79]:
pylab.pcolor(heatmap[::-1])
pylab.axis('off')
pylab.show()
pylab.imshow(img)
pylab.axis('off')
pylab.show()



In [54]:
preder.shape


Out[54]:
(22940,)

In [23]:
def visualize(dataset):
    '''Show a randomly chosen image from the dataset.

    Input : numpy array (image_number, color_channels, height, width)
    '''
    i = random.randrange(0, dataset.shape[0])
    img = np.reshape(dataset[i, ...], (dataset.shape[2], dataset.shape[3]))
    pylab.imshow(img)
    pylab.gray()
    pylab.show()

In [125]:
visualize(train3_x)



In [138]:
pred


Out[138]:
array([0, 0, 0, ..., 0, 0, 0], dtype=int32)

In [ ]: