In [1]:
%matplotlib inline

import matplotlib.pyplot as plt
import matplotlib.image as mpimg
import numpy as np

import theano.tensor as T
import theano
from itertools import izip
import random
from random import shuffle


Using gpu device 0: GeForce GTX 980 Ti (CNMeM is enabled with initial size: 95.0% of memory, cuDNN not available)

讀取 Miku


In [2]:
img_count = 0
def showimg(img, size=500):
    """Display a thresholded prediction as a size x size binary RGB image.

    Args:
        img: (size*size, 2) array-like; column 0 is the foreground score,
            thresholded at 0.5. Samples are ordered x-major (x outer
            loop, y inner loop in the original scan order).
        size: edge length of the square image (default 500).
    """
    # Vectorized replacement for the original 250k-iteration double loop:
    # threshold channel 0, reshape the flat x-major ordering into an
    # (x, y) grid, transpose so rows index y, broadcast to 3 channels.
    mask = (np.asarray(img)[:, 0] >= 0.5).astype(float)
    muki_pr = np.repeat(mask.reshape(size, size).T[:, :, None], 3, axis=2)
    plt.imshow(muki_pr)
def saveimg(fname, img, size=500):
    """Save a thresholded prediction as a size x size binary RGB image.

    Args:
        fname: output path passed to plt.imsave.
        img: (size*size, 2) array-like; column 0 is the foreground score,
            thresholded at 0.5. Samples are ordered x-major (x outer
            loop, y inner loop in the original scan order).
        size: edge length of the square image (default 500).
    """
    # Vectorized replacement for the original 250k-iteration double loop:
    # threshold channel 0, reshape the flat x-major ordering into an
    # (x, y) grid, transpose so rows index y, broadcast to 3 channels.
    mask = (np.asarray(img)[:, 0] >= 0.5).astype(float)
    muki_pr = np.repeat(mask.reshape(size, size).T[:, :, None], 3, axis=2)
    plt.imsave(fname, muki_pr)

In [3]:
def read_muki():
    """Parse ./muki.txt, whitespace-separated "y x c" rows, one per pixel.

    Returns:
        xy_data: np.matrix of [x, y] coordinates, one row per input line.
        img_data: (250000, 2) array of one-hot labels; row i is (1, 0)
            when c >= 1 (foreground pixel) else (0, 1). Rows beyond the
            file length keep their random initialization.
    """
    img_data = np.random.randn(250000, 2)
    xy_data = []

    # 'with' guarantees the handle is closed (the original leaked it).
    with open('./muki.txt', 'rb') as f:
        for count, line in enumerate(f):
            # File columns are y first, then x, then the class value.
            y, x, c = line.split()
            xy_data.append([float(x), float(y)])

            # One-hot encode: (1, 0) = ink pixel, (0, 1) = background.
            if float(c) >= 1:
                img_data[count] = (1, 0)
            else:
                img_data[count] = (0, 1)

    return np.matrix(xy_data), img_data

# Load the target image once and render it to sanity-check the parse.
xy_data,img_data = read_muki()    
showimg(img_data)



In [4]:
# Peek at the first 10 coordinate rows and their one-hot labels
# (Python 2 print statements, matching the kernel this ran on).
print xy_data[:10]
print img_data[:10]


[[-2.5  -2.5 ]
 [-2.49 -2.5 ]
 [-2.48 -2.5 ]
 [-2.47 -2.5 ]
 [-2.46 -2.5 ]
 [-2.45 -2.5 ]
 [-2.44 -2.5 ]
 [-2.43 -2.5 ]
 [-2.42 -2.5 ]
 [-2.41 -2.5 ]]
[[ 0.  1.]
 [ 0.  1.]
 [ 0.  1.]
 [ 0.  1.]
 [ 0.  1.]
 [ 0.  1.]
 [ 0.  1.]
 [ 0.  1.]
 [ 0.  1.]
 [ 0.  1.]]

Muki NN


In [5]:
from keras.models import Sequential

# Feed-forward network assembled layer-by-layer with the Sequential API.
model = Sequential()


Using Theano backend.

In [6]:
from keras.layers import Dense, Activation
from keras.optimizers import SGD

model.add(Dense(output_dim=128, input_dim=2))
model.add(Activation("relu"))
model.add(Dense(output_dim=128, input_dim=128))
model.add(Activation("relu"))
model.add(Dense(output_dim=8, input_dim=128))
model.add(Activation("relu"))
model.add(Dense(output_dim=2, input_dim=8))
model.add(Activation("softplus"))
model.compile(loss='mean_squared_error', optimizer=SGD(lr=0.05, momentum=0.9, nesterov=True), metrics=['accuracy'])

In [7]:
from IPython.display import SVG
from keras.utils.visualize_util import model_to_dot

SVG(model_to_dot(model).create(prog='dot', format='svg'))


Out[7]:
G 4442470864 dense_input_1 (InputLayer) 4442471120 dense_1 (Dense) 4442470864->4442471120 4441466832 activation_1 (Activation) 4442471120->4441466832 4442719312 dense_2 (Dense) 4441466832->4442719312 4442926224 activation_2 (Activation) 4442719312->4442926224 4442973072 dense_3 (Dense) 4442926224->4442973072 4442976208 activation_3 (Activation) 4442973072->4442976208 4442973200 dense_4 (Dense) 4442976208->4442973200 4443023376 activation_4 (Activation) 4442973200->4443023376

In [ ]:
# Train in 50 rounds of 5 epochs each, snapshotting the predicted image
# after every round so training progress can be inspected frame by frame.
# NOTE(review): assumes ./imgs/ already exists — saveimg will fail otherwise.
for ii in range(50):
    model.fit(xy_data, img_data, nb_epoch=5, batch_size=500)
    result = model.predict(xy_data)
    saveimg('./imgs/muki_relu_'+ str(ii) +'.png', result)


Epoch 1/5
250000/250000 [==============================] - 0s - loss: 0.1160 - acc: 0.8502     
Epoch 2/5
250000/250000 [==============================] - 0s - loss: 0.0693 - acc: 0.9143     
Epoch 3/5
250000/250000 [==============================] - 0s - loss: 0.0495 - acc: 0.9409     
Epoch 4/5
250000/250000 [==============================] - 0s - loss: 0.0402 - acc: 0.9562     
Epoch 5/5
250000/250000 [==============================] - 0s - loss: 0.0345 - acc: 0.9638     
Epoch 1/5
250000/250000 [==============================] - 0s - loss: 0.0304 - acc: 0.9687     
Epoch 2/5
250000/250000 [==============================] - 0s - loss: 0.0281 - acc: 0.9711     
Epoch 3/5
250000/250000 [==============================] - 0s - loss: 0.0261 - acc: 0.9729     
Epoch 4/5
250000/250000 [==============================] - 0s - loss: 0.0253 - acc: 0.9739     
Epoch 5/5
250000/250000 [==============================] - 0s - loss: 0.0242 - acc: 0.9748     
Epoch 1/5
250000/250000 [==============================] - 0s - loss: 0.0232 - acc: 0.9752     
Epoch 2/5
250000/250000 [==============================] - 0s - loss: 0.0225 - acc: 0.9756     
Epoch 3/5
250000/250000 [==============================] - 0s - loss: 0.0217 - acc: 0.9758     
Epoch 4/5
250000/250000 [==============================] - 0s - loss: 0.0212 - acc: 0.9764     
Epoch 5/5
250000/250000 [==============================] - 0s - loss: 0.0205 - acc: 0.9768     
Epoch 1/5
250000/250000 [==============================] - 0s - loss: 0.0201 - acc: 0.9771     
Epoch 2/5
250000/250000 [==============================] - 0s - loss: 0.0194 - acc: 0.9781     
Epoch 3/5
250000/250000 [==============================] - 0s - loss: 0.0190 - acc: 0.9783     
Epoch 4/5
250000/250000 [==============================] - 0s - loss: 0.0184 - acc: 0.9789     
Epoch 5/5
250000/250000 [==============================] - 0s - loss: 0.0183 - acc: 0.9790     
Epoch 1/5
250000/250000 [==============================] - 0s - loss: 0.0178 - acc: 0.9794     
Epoch 2/5
250000/250000 [==============================] - 0s - loss: 0.0175 - acc: 0.9800     
Epoch 3/5
250000/250000 [==============================] - 0s - loss: 0.0171 - acc: 0.9806     
Epoch 4/5
250000/250000 [==============================] - 0s - loss: 0.0169 - acc: 0.9809     
Epoch 5/5
250000/250000 [==============================] - 0s - loss: 0.0167 - acc: 0.9811     
Epoch 1/5
250000/250000 [==============================] - 0s - loss: 0.0163 - acc: 0.9817     
Epoch 2/5
250000/250000 [==============================] - 0s - loss: 0.0161 - acc: 0.9820     
Epoch 3/5
250000/250000 [==============================] - 0s - loss: 0.0159 - acc: 0.9824     
Epoch 4/5
250000/250000 [==============================] - 0s - loss: 0.0157 - acc: 0.9823     
Epoch 5/5
250000/250000 [==============================] - 0s - loss: 0.0157 - acc: 0.9824     
Epoch 1/5
250000/250000 [==============================] - 0s - loss: 0.0154 - acc: 0.9825     
Epoch 2/5
250000/250000 [==============================] - 0s - loss: 0.0150 - acc: 0.9826     
Epoch 3/5
250000/250000 [==============================] - 0s - loss: 0.0149 - acc: 0.9826     
Epoch 4/5
250000/250000 [==============================] - 0s - loss: 0.0149 - acc: 0.9818     
Epoch 5/5
250000/250000 [==============================] - 0s - loss: 0.0146 - acc: 0.9823     
Epoch 1/5
250000/250000 [==============================] - 0s - loss: 0.0144 - acc: 0.9827     
Epoch 2/5
250000/250000 [==============================] - 0s - loss: 0.0142 - acc: 0.9827     
Epoch 3/5
250000/250000 [==============================] - 0s - loss: 0.0141 - acc: 0.9825     
Epoch 4/5
250000/250000 [==============================] - 0s - loss: 0.0140 - acc: 0.9827     
Epoch 5/5
250000/250000 [==============================] - 0s - loss: 0.0141 - acc: 0.9825     
Epoch 1/5
250000/250000 [==============================] - 0s - loss: 0.0139 - acc: 0.9830     
Epoch 2/5
250000/250000 [==============================] - 0s - loss: 0.0137 - acc: 0.9833     
Epoch 3/5
250000/250000 [==============================] - 0s - loss: 0.0139 - acc: 0.9825     
Epoch 4/5
250000/250000 [==============================] - 0s - loss: 0.0140 - acc: 0.9828     
Epoch 5/5
250000/250000 [==============================] - 0s - loss: 0.0137 - acc: 0.9826     
Epoch 1/5
250000/250000 [==============================] - 0s - loss: 0.0138 - acc: 0.9828     
Epoch 2/5
250000/250000 [==============================] - 0s - loss: 0.0139 - acc: 0.9831     
Epoch 3/5
250000/250000 [==============================] - 0s - loss: 0.0135 - acc: 0.9833     
Epoch 4/5
250000/250000 [==============================] - 0s - loss: 0.0132 - acc: 0.9834     
Epoch 5/5
250000/250000 [==============================] - 0s - loss: 0.0132 - acc: 0.9836     
Epoch 1/5
250000/250000 [==============================] - 0s - loss: 0.0133 - acc: 0.9835     
Epoch 2/5
250000/250000 [==============================] - 0s - loss: 0.0132 - acc: 0.9834     
Epoch 3/5
250000/250000 [==============================] - 0s - loss: 0.0130 - acc: 0.9837     
Epoch 4/5
250000/250000 [==============================] - 0s - loss: 0.0132 - acc: 0.9836     
Epoch 5/5
250000/250000 [==============================] - 0s - loss: 0.0129 - acc: 0.9838     
Epoch 1/5
250000/250000 [==============================] - 0s - loss: 0.0132 - acc: 0.9836     
Epoch 2/5
250000/250000 [==============================] - 0s - loss: 0.0130 - acc: 0.9840     
Epoch 3/5
250000/250000 [==============================] - 0s - loss: 0.0132 - acc: 0.9838     
Epoch 4/5
250000/250000 [==============================] - 0s - loss: 0.0128 - acc: 0.9841     
Epoch 5/5
250000/250000 [==============================] - 0s - loss: 0.0126 - acc: 0.9842     
Epoch 1/5
250000/250000 [==============================] - 0s - loss: 0.0129 - acc: 0.9842     
Epoch 2/5
250000/250000 [==============================] - 0s - loss: 0.0128 - acc: 0.9843     
Epoch 3/5
250000/250000 [==============================] - 0s - loss: 0.0129 - acc: 0.9840     
Epoch 4/5
250000/250000 [==============================] - 0s - loss: 0.0128 - acc: 0.9840     
Epoch 5/5
250000/250000 [==============================] - 0s - loss: 0.0124 - acc: 0.9848     
Epoch 1/5
250000/250000 [==============================] - 0s - loss: 0.0127 - acc: 0.9843     
Epoch 2/5
250000/250000 [==============================] - 0s - loss: 0.0125 - acc: 0.9843     
Epoch 3/5
250000/250000 [==============================] - 0s - loss: 0.0126 - acc: 0.9843     
Epoch 4/5
250000/250000 [==============================] - 0s - loss: 0.0124 - acc: 0.9846     
Epoch 5/5
250000/250000 [==============================] - 0s - loss: 0.0128 - acc: 0.9841     
Epoch 1/5
250000/250000 [==============================] - 0s - loss: 0.0125 - acc: 0.9845     
Epoch 2/5
250000/250000 [==============================] - 0s - loss: 0.0124 - acc: 0.9846     
Epoch 3/5
250000/250000 [==============================] - 0s - loss: 0.0125 - acc: 0.9846     
Epoch 4/5
250000/250000 [==============================] - 0s - loss: 0.0126 - acc: 0.9843     
Epoch 5/5
250000/250000 [==============================] - 0s - loss: 0.0125 - acc: 0.9844     

In [ ]:
# Final prediction over the full coordinate grid, rendered inline.
result = model.predict(xy_data)
showimg(result)

結論:

  • data 作 random shuffle 的效果奇差,原因可以解釋為其實圖片的資料上下位置有相關性,如果 random 打散再作 mini batch ,反而喪失掉上下 pixel 之間相關的資料
  • 在 hidden layer 設 128 個 neuron解釋力較強,至於原因需要再探究
  • 128 neuron
  • Training 過程
  • 256 neuron
  • Training 過程

In [ ]: