In [2]:
from keras.models import Sequential

In [34]:
import numpy as np

# Dummy training data: 1000 samples with 100 random features each,
# plus an integer class label in [0, 5) per sample.
X_train = np.random.rand(1000, 100)
Y_train = np.random.randint(0, 5, size=1000)
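
Note: categorical_crossentropy (used when compiling below) expects one-hot targets of
shape (1000, 10) matching the 10-way softmax output, not a vector of integer class
indices. A minimal conversion sketch, using Keras' bundled np_utils:

from keras.utils import np_utils

# One-hot encode the integer labels to match the 10-unit softmax output layer below.
Y_train_onehot = np_utils.to_categorical(Y_train, 10)

fit and evaluate further down would then be called with Y_train_onehot in place of Y_train.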

In [35]:
model = Sequential()

In [36]:
from keras.layers.core import Dense, Activation

# A small MLP: 100 input features -> 64-unit ReLU hidden layer -> 10-way softmax output.
model.add(Dense(output_dim=64, input_dim=100, init="glorot_uniform"))
model.add(Activation("relu"))
model.add(Dense(output_dim=10, init="glorot_uniform"))
model.add(Activation("softmax"))

In [37]:
# Configure the learning process; passing "sgd" by name uses the optimizer's default settings.
model.compile(loss="categorical_crossentropy", optimizer="sgd")

In [38]:
from keras.optimizers import SGD

# Recompile with an explicitly configured optimizer instead of the string default.
model.compile(loss="categorical_crossentropy", optimizer=SGD(lr=0.01, momentum=0.9, nesterov=True))

In [39]:
# Train for 5 epochs in mini-batches of 32 samples.
model.fit(X_train, Y_train, nb_epoch=5, batch_size=32)


Epoch 1/5
1000/1000 [==============================] - 0s - loss: 46.9767     
Epoch 2/5
1000/1000 [==============================] - 0s - loss: 46.4905     
Epoch 3/5
1000/1000 [==============================] - 0s - loss: 46.4733     
Epoch 4/5
1000/1000 [==============================] - 0s - loss: 46.4694     
Epoch 5/5
1000/1000 [==============================] - 0s - loss: 46.4680     
Out[39]:
<keras.callbacks.History at 0x7fa2cc208950>
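
fit returns the History callback shown above; depending on the Keras version, the recorded
per-epoch losses are available on it (e.g. history.history["loss"] in later releases).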

In [41]:
# Evaluate the loss on the training data (no held-out set in this toy example);
# a single scalar is returned since no extra metrics were requested at compile time.
objective_score = model.evaluate(X_train, Y_train, batch_size=32)


1000/1000 [==============================] - 0s     

In [43]:
objective_score


Out[43]:
46.467602020263669

In [44]:
# Predicted class index (argmax of the softmax output) for each sample.
classes = model.predict_classes(X_train, batch_size=32)


1000/1000 [==============================] - 0s     

In [46]:
# Per-class probability scores from the softmax output for each sample.
proba = model.predict_proba(X_train, batch_size=32)


1000/1000 [==============================] - 0s     
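
predict_proba returns the per-sample softmax scores, while predict_classes returns their
argmax; a quick consistency check with plain numpy, assuming the two arrays produced above:

# Each predicted class index should equal the argmax of the corresponding probability row.
assert (classes == proba.argmax(axis=-1)).all()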
