In [2]:
from NNPy import *
NB: A "Network" is composed of a module and a loss.
In [3]:
def perceptron(inDim, outDim):
    # Build a perceptron: a single linear module wired to a hinge loss
    lm = LinearModule(inDim, outDim)
    hl = HingeLoss()
    hm = HorizontalModule([lm])
    return NetworkModule([hm], hl)

def multiLayerPerceptron(inDim, hidden, out):
    # One-hidden-layer MLP: linear -> tanh -> linear, with a square loss
    return NetworkModule([HorizontalModule([LinearModule(inDim, hidden),
                                            TanhModule(hidden, hidden),
                                            LinearModule(hidden, out)])],
                         SquareLoss())
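For intuition, the sketch below illustrates the module-plus-loss contract in plain NumPy: a linear module computing score = x·W + b with forward/backward passes, plus one hinge-loss update. This is only an illustration of the expected interface, not the actual NNPy code; the class and method names here are assumptions.

import numpy as np

class TinyLinear:
    """Minimal illustration of a linear module: score = x @ W + b (not NNPy)."""
    def __init__(self, in_dim, out_dim):
        self.W = np.random.randn(in_dim, out_dim) * 0.01
        self.b = np.zeros(out_dim)

    def forward(self, x):
        self.x = x                     # cache the input for the backward pass
        return x @ self.W + self.b

    def backward(self, grad_out, lr):
        grad_in = grad_out @ self.W.T              # gradient w.r.t. the input
        self.W -= lr * np.outer(self.x, grad_out)  # SGD step on the weights
        self.b -= lr * grad_out                    # SGD step on the bias
        return grad_in

# One perceptron-style update with the hinge loss max(0, 1 - y*score), y in {-1, +1}
module = TinyLinear(4, 1)
x, y = np.random.randn(4), 1.0
score = module.forward(x)[0]
grad_score = np.array([-y if 1.0 - y * score > 0 else 0.0])
module.backward(grad_score, lr=0.01)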
In [9]:
from DataClass import *

print("----====== MNIST 8/6 =======----")
# Binary task: discriminate the digits 8 and 6
trainV, trainL, testV, testL = getMnistDualDataset()

print("----Perceptron----")
NBITER = 10          # number of gradient-descent passes
GD_STEP = 0.00001    # learning rate
network = perceptron(28*28, 1)
network.trainTest(trainV, trainL, testV, testL, NBITER, GD_STEP)

print('----multiLayerPerceptron----')
HIDDEN = 2           # hidden-layer size
GD_STEP = 0.00001
network = multiLayerPerceptron(28*28, HIDDEN, 1)
network.trainTest(trainV, trainL, testV, testL, NBITER, GD_STEP)
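trainTest is assumed to run NBITER passes of gradient descent over the training pairs and then report the error rate on the test set. The sketch below reproduces that loop for a plain linear model with a hinge loss on synthetic data; the function name and the synthetic data are stand-ins, not the NNPy implementation.

import numpy as np

def train_test(trainV, trainL, testV, testL, nb_iter, gd_step):
    """Per-sample gradient descent with a hinge loss, then test-set error rate."""
    W = np.zeros(trainV.shape[1])
    for _ in range(nb_iter):                 # nb_iter passes over the training set
        for x, y in zip(trainV, trainL):     # labels expected in {-1, +1}
            if 1.0 - y * (x @ W) > 0:        # hinge loss is active for this sample
                W += gd_step * y * x         # gradient-descent step
    preds = np.sign(testV @ W)
    return np.mean(preds != testL)           # fraction of misclassified test points

# Synthetic stand-in for the 8/6 data, just to show the call pattern
rng = np.random.default_rng(0)
X = rng.normal(size=(200, 28 * 28))
y = np.sign(X[:, 0])
print(train_test(X[:150], y[:150], X[150:], y[150:], nb_iter=10, gd_step=0.00001))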
In [15]:
from sklearn.datasets import fetch_mldata
# Full 10-class MNIST (70000 images); note that fetch_mldata has since been
# removed from scikit-learn, with fetch_openml('mnist_784') as its replacement.
mnist = fetch_mldata('MNIST original')
mnist
Out[15]:
In [22]:
HIDDEN = 50
# 10-class MLP over the full MNIST dataset
network = multiLayerPerceptron(28*28, HIDDEN, 10)
# Iterate over (image, label) pairs of the full dataset
for a, b in zip(mnist["data"], mnist["target"]):
    print(a)
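To train the 10-output MLP with the square loss, the scalar digit labels would typically be converted to one-hot vectors. A minimal sketch of that preparation, reusing the mnist Bunch loaded above and assuming the usual 60000/10000 split and the same trainTest signature as in the binary experiments:

import numpy as np

# One-hot encode the digit labels so they match the 10 network outputs
labels = mnist["target"].astype(int)        # digits 0..9
onehot = np.eye(10)[labels]                 # shape (n_samples, 10)

# Scale pixels to [0, 1] and use the standard 60000/10000 MNIST split (assumption)
data = mnist["data"] / 255.0
trainV, testV = data[:60000], data[60000:]
trainL, testL = onehot[:60000], onehot[60000:]

# Assumed to reuse the same trainTest signature as above:
# network.trainTest(trainV, trainL, testV, testL, NBITER, GD_STEP)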
In [ ]: