In [ ]:
import os
import pylearn2

# Load the YAML experiment description as a raw string and echo it.
# FIX: print() with a single argument is valid under both Python 2 and
# Python 3, unlike the original `print train` statement syntax.
with open('mlp.yaml', 'r') as f:
    train = f.read()

print(train)

In [ ]:
from pylearn2.config import yaml_parse
# Parse the YAML text into a pylearn2 Train object (note: this rebinds
# `train` from the raw string to the Train instance) and run training.
train = yaml_parse.load(train)
train.main_loop()

In [ ]:
import plot_monitor

# Plot the training-set misclassification curve recorded in the monitor
# of the best saved model.  NOTE(review): plot_monitor is presumably a
# local wrapper around pylearn2's plot_monitor script -- verify.
plot_monitor.run(model_paths=["mlp_best.pkl"], options_out=None, show_codes=["train_y_misclass"])

In [ ]:
import scipy.io as io
import numpy as np
import pylearn2
from pylearn2.datasets.dense_design_matrix import DenseDesignMatrix
import random as rd

class XOR(DenseDesignMatrix):
    """Synthetic XOR dataset: 1000 random binary input pairs with
    one-hot targets ([1, 0] for class 0, [0, 1] for class 1).

    NOTE(review): no RNG seed is set, so the dataset differs per run.
    """

    def __init__(self):
        self.class_names = ['0', '1']
        # Same random draw as before: 1000 pairs of independent {0, 1} bits.
        X = np.array([[rd.randint(0, 1), rd.randint(0, 1)]
                      for _ in range(1000)])
        # Idiom fix: a + b == 1 on bits is exactly XOR, and the one-hot
        # targets fall out of indexing an identity matrix with the label,
        # replacing the explicit per-row append loop.
        labels = X[:, 0] ^ X[:, 1]
        y = np.eye(2, dtype=int)[labels]
        super(XOR, self).__init__(X=X, y=y)

ds = XOR()

In [ ]:
from pylearn2.models import mlp
# Hidden layer: 2 sigmoid units, weights drawn uniformly from
# [-0.1, 0.1], biases initialised to 1.
hidden_layer = mlp.Sigmoid(layer_name='hidden', dim=2, irange=.1, init_bias=1.)

In [ ]:
# Softmax output layer (positional args: n_classes=2, layer_name='output').
output_layer = mlp.Softmax(2, 'output', irange=.1)

In [ ]:
from pylearn2.training_algorithms import sgd
from pylearn2.termination_criteria import EpochCounter
# Plain SGD, minibatches of 10, stopping after exactly 400 epochs.
trainer = sgd.SGD(learning_rate=.05, batch_size=10, termination_criterion=EpochCounter(400))

In [ ]:
# Assemble the 2-2-2 network (nvis=2 visible inputs) and bind the
# trainer to the XOR dataset built above.
layers = [hidden_layer, output_layer]
ann = mlp.MLP(layers, nvis=2)
trainer.setup(ann, ds)

In [ ]:
# Run SGD epochs until the EpochCounter termination criterion fires,
# reporting monitor statistics after every epoch.
keep_going = True
while keep_going:
    trainer.train(dataset=ds)
    ann.monitor.report_epoch()
    ann.monitor()
    keep_going = trainer.continue_learning(ann)

In [ ]:
import theano

# Forward-propagate a single input pair through the trained XOR net and
# print the class probabilities.  FIX: print() keeps the cell valid
# under both Python 2 and Python 3.
# NOTE(review): `inputs` is an integer array -- theano/pylearn2 may
# expect floatX here; confirm against the model's input space.
inputs = np.array([[1, 0]])
print(ann.fprop(theano.shared(inputs, name='inputs')).eval())

In [ ]:
import scipy.io as io
import numpy as np
from pylearn2.datasets.dense_design_matrix import DenseDesignMatrix
import random as rd
from random import randrange

class matlabData(DenseDesignMatrix):
    """Digit dataset loaded from digits.mat, with rows shuffled.

    'X' holds the 400-pixel images, 'yp' the (presumably one-hot --
    verify) targets.
    """

    def __init__(self):
        self.class_names = ['0', '1']
        data = io.loadmat('digits.mat', squeeze_me=True)

        X = data['X']
        y = data['yp']

        # BUG FIX: the original drew every row index with randrange(),
        # i.e. sampled WITH replacement -- duplicating some examples and
        # dropping others.  A single random permutation shuffles the
        # rows without losing any example.
        perm = np.random.permutation(X.shape[0])
        xx = X[perm]
        yy = y[perm]

        super(matlabData, self).__init__(X=xx, y=yy)

ds = matlabData()

In [ ]:
from pylearn2.models import mlp
# First hidden layer: 25 sigmoid units for the 400-pixel digit inputs.
first_layer = mlp.Sigmoid(layer_name='hidden1', dim=25, irange=.05, init_bias=1.)

In [ ]:
# Second hidden layer.  NOTE(review): dim=26 looks asymmetric next to
# hidden1's 25 units, and this layer is never added to the MLP's layer
# stack in this notebook -- confirm both are intentional.
second_layer = mlp.Sigmoid(layer_name='hidden2', dim=26, irange=.05, init_bias=1.)

In [ ]:
# Softmax output over the 10 digit classes.
output_layer = mlp.Softmax(10, 'output', irange=.05)

In [ ]:
from pylearn2.training_algorithms import sgd
from pylearn2.termination_criteria import EpochCounter
# SGD with minibatches of 100, stopping after 1000 epochs.
trainer = sgd.SGD(learning_rate=.05, batch_size=100, termination_criterion=EpochCounter(1000))

In [ ]:
# Build a 400-25-10 network.  NOTE(review): second_layer defined above
# is deliberately (?) left out of the stack -- confirm.
layers = [first_layer,output_layer]
ann = mlp.MLP(layers, nvis=400)
trainer.setup(ann, ds)

In [ ]:
# Train until the 1000-epoch termination criterion is met.  Monitor
# reporting is skipped here (the original left it commented out) to
# keep the cell's output small.
keep_going = True
while keep_going:
    trainer.train(dataset=ds)
    keep_going = trainer.continue_learning(ann)

In [ ]:
import theano

# Evaluate on (what is intended as) the held-out tail of the dataset.
# NOTE(review): the slice 4001:5000 yields only 999 rows; 4000:5000 was
# probably intended -- kept as-is so the plotting cell below still
# lines up with `test`.
data = io.loadmat('digits.mat', squeeze_me=True)
X = data['X'][4001:5000, :]
y = data['y'][4001:5000]

predict = ann.fprop(theano.shared(X, name='inputs')).eval()

# argmax+1 maps the 0-based softmax index back to MATLAB's 1-based labels.
test = np.argmax(predict, axis=1) + 1
print(test)
print(y)
# BUG FIX: the original divided by a hard-coded 1000. even though the
# slice holds 999 samples; divide by the actual count.  list() keeps the
# map() result usable under both Python 2 and Python 3, and print()
# keeps the statement valid under both.
print(list(map(lambda a, b: a == b, y, test)).count(True) / float(len(y)))

In [ ]:
import matplotlib.pyplot as plt
import numpy as np

# Scatter the predicted label of each test sample.  FIX: derive the
# x-axis from len(test) instead of the hard-coded 999 so the cell
# survives changes to the test-slice size.
x = range(len(test))

plt.plot(x, test, 'rx')
plt.xlabel('test sample index')
plt.ylabel('predicted digit label')
plt.show()

In [ ]:
import scipy.io as io
import numpy as np
from pylearn2.datasets.dense_design_matrix import DenseDesignMatrix
import random as rd
from random import randrange

class matlabData(DenseDesignMatrix):
    """Digit dataset loaded from digits.mat, with rows shuffled.

    NOTE(review): this re-defines the identical matlabData class from an
    earlier cell (silently shadowing it) -- consider reusing that one.
    """

    def __init__(self):
        self.class_names = ['0', '1']
        data = io.loadmat('digits.mat', squeeze_me=True)

        X = data['X']
        y = data['yp']

        # BUG FIX: the original drew every row index with randrange(),
        # i.e. sampled WITH replacement -- duplicating some examples and
        # dropping others.  A single random permutation shuffles the
        # rows without losing any example.
        perm = np.random.permutation(X.shape[0])
        xx = X[perm]
        yy = y[perm]

        super(matlabData, self).__init__(X=xx, y=yy)

ds = matlabData()

In [ ]:
# First conv layer: 64 rectified-linear 5x5 kernels, 4x4 max pooling
# with stride 2, and a max-kernel-norm constraint.
# NOTE(review): relies on `mlp` imported in an earlier cell.
first_layer = mlp.ConvRectifiedLinear(layer_name='conv1', output_channels=64, irange= .05, 
                                      kernel_shape= [5, 5],pool_shape= [4, 4],
                                      pool_stride= [2, 2],max_kernel_norm= 1.9365)

In [ ]:
# Second conv layer: identical hyperparameters to conv1.
second_layer = mlp.ConvRectifiedLinear(layer_name='conv2', output_channels=64, irange= .05, 
                                      kernel_shape= [5, 5],pool_shape= [4, 4],
                                      pool_stride= [2, 2],max_kernel_norm= 1.9365)

In [ ]:
# Softmax output over the 10 digit classes; weights initialised from a
# Gaussian (istdev) with a max-column-norm constraint.
output_layer = mlp.Softmax(max_col_norm= 1.9365,
                     layer_name= 'output',
                     n_classes= 10,
                     istdev= .05)

In [ ]:
from pylearn2.training_algorithms.learning_rule import Momentum
from pylearn2.costs.cost import SumOfCosts
from pylearn2.costs.cost import MethodCost
from pylearn2.costs.mlp import WeightDecay
from pylearn2.termination_criteria import And
from pylearn2.termination_criteria import EpochCounter

# SGD + momentum trainer for the conv net: model cost (cost_from_X)
# plus per-layer L2 weight decay (one coefficient each for conv1,
# conv2 and the output layer), stopping after 500 epochs.
# NOTE(review): `sgd` must already be imported from an earlier cell.
trainer = sgd.SGD(learning_rate=.01, batch_size=100, 
                  learning_rule=Momentum(init_momentum=0.5),
                  cost=SumOfCosts(costs=[
                        MethodCost(method='cost_from_X'),
                        WeightDecay(coeffs=[.00005, .00005, .00005])
                  ]),
                  termination_criterion=And(criteria=[
                        EpochCounter(max_epochs=500)
                  ]))

In [ ]:
from pylearn2.space import Conv2DSpace

# Present the 400-pixel digit vectors as 20x20 single-channel images
# (20 * 20 * 1 = 400) so the convolutional layers can consume them.
entrada=Conv2DSpace(shape=[20,20],num_channels=1)
layers = [first_layer,second_layer,output_layer]
ann = mlp.MLP(layers=layers,batch_size=100,input_space=entrada)
trainer.setup(ann, ds)

In [ ]: