In [1]:
from keras.models import Sequential
from keras.layers import Dense
import numpy
from keras.utils import np_utils
seed = 7
numpy.random.seed(seed)
from datetime import datetime


Using TensorFlow backend.

In [2]:
start=datetime.now()
Data=numpy.loadtxt("../../EXP-DATA/DATA/MNIST_data/mnist_train.csv", delimiter=",")
print datetime.now()-start


0:00:44.606826

In [3]:
nb_classes=10
X=Data[0:10000,1:]
Y_raw=Data[0:10000,0]
Y=np_utils.to_categorical(Y_raw, nb_classes)

In [7]:
model = Sequential()
model.add(Dense(3, input_dim=784, init='uniform', activation='relu'))
#model.add(Dense(8, init='uniform', activation='relu'))
model.add(Dense(nb_classes, init='uniform', activation='sigmoid'))

In [19]:
model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])

In [22]:
hist = model.fit(X, Y, nb_epoch=5, batch_size=10,  verbose=1, validation_split =0.2)


Train on 8000 samples, validate on 2000 samples
Epoch 1/5
8000/8000 [==============================] - 2s - loss: 0.2311 - acc: 0.9176 - val_loss: 0.2381 - val_acc: 0.9137
Epoch 2/5
8000/8000 [==============================] - 2s - loss: 0.2306 - acc: 0.9177 - val_loss: 0.2332 - val_acc: 0.9173
Epoch 3/5
8000/8000 [==============================] - 2s - loss: 0.2301 - acc: 0.9176 - val_loss: 0.2355 - val_acc: 0.9181
Epoch 4/5
8000/8000 [==============================] - 2s - loss: 0.2313 - acc: 0.9175 - val_loss: 0.2357 - val_acc: 0.9175
Epoch 5/5
8000/8000 [==============================] - 2s - loss: 0.2313 - acc: 0.9178 - val_loss: 0.2355 - val_acc: 0.9176

In [16]:
model


Out[16]:
<keras.models.Sequential at 0x1138f2650>

In [ ]: