In [ ]:
import os
import numpy as np
from pandas.io.parsers import read_csv
from sklearn.utils import shuffle
In [ ]:
FTRAIN = 'new/train.csv'
FTEST = 'new/test.csv'
def load(test=False, cols=None):
    fname = FTEST if test else FTRAIN
    df = read_csv(os.path.expanduser(fname))  # load pandas dataframe

    df['Image'] = df['Image'].apply(lambda im: np.fromstring(im, sep=' '))

    if cols:
        df = df[list(cols) + ['Image']]

    print(df.count())  # prints the number of values for each column
    df = df.dropna()  # drop all rows that have missing values in them

    X = np.vstack(df['Image'].values) / 255.  # scale pixel values to [0, 1]
    X = X.astype(np.float32)

    if not test:  # only FTRAIN has any target columns
        y = df[df.columns[:-1]].values
        y = (y - 48) / 48  # scale target coordinates to [-1, 1]
        X, y = shuffle(X, y, random_state=2)  # shuffle train data
        y = y.astype(np.float32)
    else:
        y = None

    return X, y
X, y = load()
In [ ]:
# Debugging cell for the loaded data: print the target shape and value range,
# which the output-layer settings below depend on.
print("y.shape == {}; y.min == {:.3f}; y.max == {:.3f}".format(y.shape, y.min(), y.max()))
In [ ]:
# A piece of code that reshapes the image data into the
# (samples, channels, rows, columns) form suitable for convolution.
def load2d(test=False, cols=None):
    X, y = load(test=test, cols=cols)
    X = X.reshape(-1, 1, 96, 96)
    return X, y
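In [ ]:
# Hedged sanity check (not in the original notebook): confirm that load2d()
# returns images in the (samples, channels, height, width) layout that
# Conv2DLayer expects, assuming 96x96 grayscale images as in load() above.
X2d, y2d = load2d()
print("load2d X shape: {}".format(X2d.shape))  # expected (n_samples, 1, 96, 96)
print("load2d y shape: {}".format(y2d.shape))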
In [ ]:
from lasagne import layers
from lasagne.updates import nesterov_momentum
from nolearn.lasagne import NeuralNet
from lasagne.layers import DenseLayer
from lasagne.layers import InputLayer
from lasagne.layers import DropoutLayer
from lasagne.layers import Conv2DLayer
from lasagne.layers import MaxPool2DLayer
from lasagne.nonlinearities import softmax
from lasagne.updates import adam
from lasagne.layers import get_all_params
In [ ]:
net1 = NeuralNet(
    layers=[
        ('input', layers.InputLayer),
        ('conv1', layers.Conv2DLayer),
        ('pool1', layers.MaxPool2DLayer),
        ('conv2', layers.Conv2DLayer),
        ('pool2', layers.MaxPool2DLayer),
        ('conv3', layers.Conv2DLayer),
        ('pool3', layers.MaxPool2DLayer),
        ('hidden4', layers.DenseLayer),
        ('hidden5', layers.DenseLayer),
        ('output', layers.DenseLayer),
    ],
    # Layer settings
    input_shape=(None, 1, 96, 96),
    conv1_num_filters=32,
    conv1_filter_size=(3, 3),
    pool1_pool_size=(2, 2),
    conv2_num_filters=64,
    conv2_filter_size=(2, 2),
    pool2_pool_size=(2, 2),
    conv3_num_filters=128,
    conv3_filter_size=(2, 2),
    pool3_pool_size=(2, 2),
    hidden4_num_units=500,
    hidden5_num_units=500,
    output_num_units=2,
    output_nonlinearity=None,  # identity output, since this is a regression
    # End of layer settings
    # Training / optimization settings
    update_learning_rate=0.01,
    update_momentum=0.9,
    regression=True,
    max_epochs=10,
    verbose=1,
)
X, y = load2d()
net1.fit(X, y)
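In [ ]:
# Hedged sketch (not part of the original notebook): nolearn's NeuralNet can call
# user-supplied handlers after every epoch via `on_epoch_finished`.  The class
# below is a common early-stopping pattern; the name EarlyStopping and the
# patience value are assumptions, not something defined elsewhere in this notebook.
class EarlyStopping(object):
    def __init__(self, patience=20):
        self.patience = patience
        self.best_valid = np.inf
        self.best_valid_epoch = 0
        self.best_weights = None

    def __call__(self, nn, train_history):
        current_valid = train_history[-1]['valid_loss']
        current_epoch = train_history[-1]['epoch']
        if current_valid < self.best_valid:
            # remember the best weights seen so far
            self.best_valid = current_valid
            self.best_valid_epoch = current_epoch
            self.best_weights = nn.get_all_params_values()
        elif self.best_valid_epoch + self.patience < current_epoch:
            # no improvement for `patience` epochs: restore best weights and stop
            print("Early stopping; best valid loss was {:.6f} at epoch {}.".format(
                self.best_valid, self.best_valid_epoch))
            nn.load_params_from(self.best_weights)
            raise StopIteration()

# Usage: pass it when constructing the net, e.g.
#   NeuralNet(..., on_epoch_finished=[EarlyStopping(patience=20)], max_epochs=1000, ...)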
In [ ]:
from graphviz import Digraph
import pydotplus
from nolearn.lasagne.visualize import draw_to_notebook
from nolearn.lasagne.visualize import plot_loss
from nolearn.lasagne.visualize import plot_conv_weights
from nolearn.lasagne.visualize import plot_conv_activity
from nolearn.lasagne.visualize import plot_occlusion
from nolearn.lasagne.visualize import plot_saliency
In [ ]:
# requires graphviz to be installed, e.g. sudo apt-get install graphviz
draw_to_notebook(net1)
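In [ ]:
# Hedged alternative (assumption: this nolearn version also provides
# nolearn.lasagne.visualize.draw_to_file): render the same architecture
# diagram to an image file instead of displaying it inline.
from nolearn.lasagne.visualize import draw_to_file
draw_to_file(net1, 'net1_architecture.png')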
In [ ]:
import matplotlib
import matplotlib.pyplot as plt
import matplotlib.cm as cm
In [ ]:
train_loss = np.array([i["train_loss"] for i in net1.train_history_])
valid_loss = np.array([i["valid_loss"] for i in net1.train_history_])
In [ ]:
plt.plot(train_loss, linewidth=3, label="train")
plt.plot(valid_loss, linewidth=3, label="valid")
plt.grid()
plt.legend()
plt.xlabel("epoch")
plt.ylabel("loss")
plt.ylim(1e-5, 1e2)
plt.yscale("log")
plt.show()
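In [ ]:
# Hedged sketch (not in the original notebook): the targets were scaled with
# y = (y - 48) / 48, so the MSE losses plotted above are in units of (1/48 px)^2.
# Converting the best validation loss back to a pixel RMSE makes it easier to read.
best_valid_mse = valid_loss.min()
print("best validation RMSE: {:.3f} pixels".format(np.sqrt(best_valid_mse) * 48))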
In [ ]:
def plot_sample(x, y, axis):
    img = x.reshape(96, 96)
    axis.imshow(img, cmap='gray', interpolation='none')  # no pixel interpolation
    axis.scatter(y[0::2] * 48 + 48, y[1::2] * 48 + 48, marker='x', s=10)

X, _ = load2d(test=True)
y_pred = net1.predict(X)

fig = plt.figure(figsize=(24, 24))  # figure size
fig.subplots_adjust(
    left=0, right=1, bottom=0, top=1, hspace=0.05, wspace=0.05)
for i in range(1):
    ax = fig.add_subplot(4, 4, i + 1)
    ax.set_title("Picture:")  # subplot title
    ax.set_xticks([])  # hide x ticks
    ax.set_yticks([])  # hide y ticks
    plot_sample(X[i], y_pred[i], ax)
plt.show()
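In [ ]:
# Hedged sketch (not part of the original notebook): undo the target scaling
# from load() so the predicted keypoints are in pixel coordinates of the
# 96x96 test images, then store them; the file name is an arbitrary choice.
y_pred_pixels = y_pred * 48 + 48
y_pred_pixels = np.clip(y_pred_pixels, 0, 95)  # keep coordinates inside the image
np.save('predicted_keypoints.npy', y_pred_pixels)
print(y_pred_pixels[:3])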
In [ ]:
plot_conv_weights(net1.layers_[1], figsize=(4, 4))
plt.show()
In [ ]:
plot_conv_activity(net1.layers_[1], X[0:2])
plt.show()
In [ ]:
plot_saliency(net1, X[0:2]);
plt.show()
In [ ]:
# And here is the code that saves our trained neural network to disk
try:
    import cPickle as pickle  # Python 2
except ImportError:
    import pickle  # Python 3
with open('vova.pickle', 'wb') as f:
    pickle.dump(net1, f, -1)  # -1 = use the highest available pickle protocol
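In [ ]:
# Hedged sketch (not part of the original notebook): reload the pickled network
# and check that it still predicts.  As a lighter-weight alternative, nolearn's
# NeuralNet offers save_params_to / load_params_from, which store only the
# weights; the architecture then has to be re-created in code before loading.
with open('vova.pickle', 'rb') as f:
    net_restored = pickle.load(f)
print(net_restored.predict(X[:1]).shape)

# weights-only variant (file name is an arbitrary choice)
net1.save_params_to('vova_weights.pickle')
# net_new = NeuralNet(...same layer definition as net1...)
# net_new.load_params_from('vova_weights.pickle')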