In [ ]:
# imports and logger!
from opendeep.log.logger import config_root_logger
from opendeep.models.container import Prototype
from opendeep.models.single_layer.basic import BasicLayer, SoftmaxLayer
from opendeep.optimization.stochastic_gradient_descent import SGD
from opendeep.data.standard_datasets.image.mnist import MNIST, datasets
from opendeep.monitor.monitor import Monitor

config_root_logger()

In [ ]:
# Create the MLP: two tanh hidden layers with dropout, plus a softmax classification layer
mlp = Prototype()
# First hidden layer: flattened 28x28 MNIST images in, 1000 tanh units out
mlp.add(
    BasicLayer(input_size=28*28, output_size=1000, activation='tanh', noise='dropout', noise_level=0.3)
)
# Second hidden layer: input size is inferred from the previous layer's output
mlp.add(
    BasicLayer(output_size=1000, activation='tanh', noise='dropout', noise_level=0.3)
)
# Output layer: softmax over the 10 digit classes
mlp.add(
    SoftmaxLayer(output_size=10)
)
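
The three stacked layers define an ordinary feed-forward computation: two tanh hidden layers followed by a softmax over the digit classes. As a minimal numpy sketch of that forward pass at prediction time (the parameter names W1, b1, etc. are illustrative, not OpenDeep attributes; dropout only perturbs the hidden activations during training):

In [ ]:
# Illustrative sketch of the forward pass the Prototype above computes.
# W1, b1, ... are hypothetical parameter names, not OpenDeep internals.
import numpy as np

def softmax(z):
    e = np.exp(z - z.max(axis=1, keepdims=True))  # shift for numerical stability
    return e / e.sum(axis=1, keepdims=True)

def forward(x, W1, b1, W2, b2, W3, b3):
    h1 = np.tanh(x.dot(W1) + b1)     # first hidden layer: 784 -> 1000
    h2 = np.tanh(h1.dot(W2) + b2)    # second hidden layer: 1000 -> 1000
    return softmax(h2.dot(W3) + b3)  # output layer: 1000 -> 10 class probabilities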

In [ ]:
# Create the MNIST data object, folding the validation set into the training set
mnist = MNIST(concat_train_valid=True)

# Create the stochastic gradient descent optimizer with Nesterov momentum
optimizer = SGD(model=mlp,
                dataset=mnist,
                n_epoch=30,
                batch_size=500,
                learning_rate=.01,
                momentum=.9,
                nesterov_momentum=True)

# Make a monitor to watch the train and test prediction errors
error_monitor = Monitor('error', mlp.get_monitors()['softmax_error'], train=True, test=True)
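
For reference, here is a minimal sketch of the update rule those settings select: one SGD step with Nesterov momentum. `grad_fn` is a hypothetical callable returning the gradient of the training cost, not part of OpenDeep's API.

In [ ]:
# Minimal sketch of one parameter update with Nesterov momentum.
# grad_fn is a hypothetical gradient callable, not an OpenDeep function.
def nesterov_sgd_step(param, velocity, grad_fn, learning_rate=.01, momentum=.9):
    lookahead = param + momentum * velocity  # evaluate the gradient at the look-ahead point
    velocity = momentum * velocity - learning_rate * grad_fn(lookahead)
    return param + velocity, velocity       # updated parameter and velocity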

In [ ]:
# Train the model with the optimizer!
optimizer.train(monitor_channels=error_monitor)
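
Conceptually, the call above loops over the dataset in minibatches for the requested number of epochs, taking one momentum step per batch. A schematic sketch, with a dummy array standing in for the 60,000 concatenated train+valid images:

In [ ]:
# Schematic of the loop optimizer.train runs: 30 epochs x minibatches of 500.
import numpy as np

train_x = np.zeros((60000, 28 * 28), dtype='float32')  # dummy stand-in for MNIST train+valid
for epoch in range(30):
    for start in range(0, len(train_x), 500):
        batch = train_x[start:start + 500]  # one Nesterov-momentum step is taken per batch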

In [ ]:
# Make some predictions on the test data!
test_data, test_labels = mnist.getSubset(datasets.TEST)

n = 30
predictions = mlp.run(test_data.get_value())  # run the trained model on the raw test images
labels = test_labels.get_value().astype('int32')

print("Predictions:", predictions[:n])
print("Correct:    ", labels[:n])
print("Accuracy:   ", 100. * sum(predictions == labels) / len(labels), "%")