In [10]:
import keras as keras
from keras.models import Sequential
from keras.layers import Dense, Activation

model = Sequential([
    Dense(32, input_shape=(100,)),
    Activation('relu'),
    Dense(2),
    Activation('softmax'),
])

In [11]:
# For a binary classification problem
model.compile(optimizer='rmsprop',
              loss='binary_crossentropy',
              metrics=['accuracy'])

In [12]:
# Generate dummy data
import numpy as np
data = np.random.random((1000, 100))
labels = np.random.randint(2, size=(1000, 2))

# Convert labels to categorical one-hot encoding
one_hot_labels = keras.utils.to_categorical(labels, num_classes=2)

# Train the model, iterating on the data in batches of 32 samples
model.fit(data, labels, epochs=10, batch_size=32)


Epoch 1/10
1000/1000 [==============================] - 0s 225us/step - loss: 0.7242 - acc: 0.4875
Epoch 2/10
1000/1000 [==============================] - 0s 30us/step - loss: 0.7130 - acc: 0.5085
Epoch 3/10
1000/1000 [==============================] - 0s 33us/step - loss: 0.7039 - acc: 0.5115
Epoch 4/10
1000/1000 [==============================] - 0s 34us/step - loss: 0.6987 - acc: 0.5165
Epoch 5/10
1000/1000 [==============================] - 0s 34us/step - loss: 0.6954 - acc: 0.5235
Epoch 6/10
1000/1000 [==============================] - 0s 35us/step - loss: 0.6921 - acc: 0.5355
Epoch 7/10
1000/1000 [==============================] - 0s 30us/step - loss: 0.6883 - acc: 0.5535
Epoch 8/10
1000/1000 [==============================] - 0s 36us/step - loss: 0.6846 - acc: 0.5385
Epoch 9/10
1000/1000 [==============================] - 0s 31us/step - loss: 0.6815 - acc: 0.5585
Epoch 10/10
1000/1000 [==============================] - 0s 34us/step - loss: 0.6775 - acc: 0.5785
Out[12]:
<keras.callbacks.History at 0x1d883477128>

In [ ]: