In [ ]:
# imports and logger!
# OpenDeep pieces used below: Prototype (sequential container model),
# Dense/SoftmaxLayer (fully-connected layers), SGD (optimizer),
# MNIST (dataset wrapper), Monitor (training-metric channel).
from opendeep.log import config_root_logger
from opendeep.models import Prototype, Dense, SoftmaxLayer
from opendeep.optimization import SGD
from opendeep.data import MNIST
from opendeep.monitor import Monitor, FileService
# NOTE(review): FileService is imported but never used in this notebook.

# Route OpenDeep's log messages through the root logger so training
# progress is printed to the notebook output.
config_root_logger()

In [ ]:
# Assemble the MLP layer by layer: two tanh hidden layers with dropout,
# followed by a 10-way softmax classifier for the MNIST digits.
mlp = Prototype()

# First hidden layer: flattened 28x28 image in, 1000 tanh units out.
hidden1 = Dense(input_size=28*28, output_size=1000, activation='tanh', noise='dropout', noise_level=0.3)
mlp.add(hidden1)

# Second hidden layer; input size is inferred from the previous layer.
hidden2 = Dense(output_size=1000, activation='tanh', noise='dropout', noise_level=0.3)
mlp.add(hidden2)

# Classification layer: one output unit per digit class.
classifier = SoftmaxLayer(output_size=10)
mlp.add(classifier)

In [ ]:
# Dataset: MNIST, with the validation split folded into the training split.
mnist = MNIST(concat_train_valid=True)

# Optimizer: minibatch SGD with Nesterov momentum.
# Hyperparameters gathered here so they are easy to tweak in one place.
sgd_settings = dict(
    epochs=100,
    batch_size=500,
    learning_rate=.01,
    momentum=.9,
    nesterov_momentum=True,
)
optimizer = SGD(model=mlp, dataset=mnist, **sgd_settings)

# Watch the softmax classification error on both the train and test splits.
errorMonitor = Monitor('error', mlp.get_monitors()['softmax_error'], train=True, test=True)

In [ ]:
# Run the training loop, reporting the error monitor as it goes.
channels = [errorMonitor]
optimizer.train(monitor_channels=channels)

In [ ]:
# Make some predictions on test data!
test_data, test_labels = mnist.test_inputs, mnist.test_targets

n=30
predictions = mlp.run(test_data)
labels = test_labels.astype('int32')

print "Predictions:", predictions[:n]
print "Correct:    ", labels[:n]
print "Accuracy:   ", sum((predictions==labels) * 1./len(labels))*100, "%"

In [ ]: