In [1]:
from neuralnet import *
from activations import *
import numpy as np

In [2]:
layer1 = Layer("layer1", 3, 1, LinearActivation(), SimpleLearner(0.005))

In [3]:
data = np.random.uniform(0, 10, (100,3))
targets = data[:, -1].reshape((100,1))

In [4]:
training = zip(data, targets)

In [5]:
nn = NeuralNet([layer1], MeanSquaredError(), 0.9)

In [6]:
nn.train(training, 50)


epoch: 0
cost:3.16907548733
epoch: 1
cost:1.81998410289
epoch: 2
cost:1.2587013738
epoch: 3
cost:0.942421398266
epoch: 4
cost:0.707602424938
epoch: 5
cost:0.56441135274
epoch: 6
cost:0.425360940911
epoch: 7
cost:0.319993936035
epoch: 8
cost:0.261747508807
epoch: 9
cost:0.212195380857
epoch: 10
cost:0.173756030171
epoch: 11
cost:0.122180152283
epoch: 12
cost:0.100535501767
epoch: 13
cost:0.0780195817168
epoch: 14
cost:0.0583299748963
epoch: 15
cost:0.0449164990371
epoch: 16
cost:0.0353347997375
epoch: 17
cost:0.0305565341201
epoch: 18
cost:0.0230029677286
epoch: 19
cost:0.0185571221061
epoch: 20
cost:0.0152225740659
epoch: 21
cost:0.0123142346698
epoch: 22
cost:0.0104881207325
epoch: 23
cost:0.00817393799651
epoch: 24
cost:0.00716153404696
epoch: 25
cost:0.00550355998767
epoch: 26
cost:0.00462543901228
epoch: 27
cost:0.00403649439196
epoch: 28
cost:0.00347731711206
epoch: 29
cost:0.00306094533208
epoch: 30
cost:0.00274912986588
epoch: 31
cost:0.00243458840639
epoch: 32
cost:0.00216610160523
epoch: 33
cost:0.00209072244731
epoch: 34
cost:0.00179946740704
epoch: 35
cost:0.00175951953511
epoch: 36
cost:0.00154790941192
epoch: 37
cost:0.0013557209352
epoch: 38
cost:0.00121621839265
epoch: 39
cost:0.00120535820758
epoch: 40
cost:0.0011936592352
epoch: 41
cost:0.00117897318199
epoch: 42
cost:0.00112183111794
epoch: 43
cost:0.00106810801166
epoch: 44
cost:0.000988904376303
epoch: 45
cost:0.00100936455839

In [7]:
layer1.weights


Out[7]:
array([[ 0.09674594],
       [-0.00482655],
       [-0.0022269 ],
       [ 0.98962467]])

In [8]:
layer = Layer("layer1", 3, 3, TanhActivation())
layer2 = Layer("layer2", 3, 1, LinearActivation())
nn = NeuralNet([layer, layer2], MeanSquaredError(), 0)

numGrads = nn.numerical_gradient(data, targets, 1e-5)
grads = nn.get_gradients(data, targets)

zipped = zip(numGrads, grads)

numGrad, grad = zipped[0]
sub = numGrad - grad
plus = numGrad + grad

numGrad2, grad2 = zipped[-1]
sub2 = numGrad2 - grad2
plus2 = numGrad2 + grad2

print numGrads
print grads


[array([[ 0.26065202,  0.12895171, -0.20813425],
       [ 1.35893086,  0.666569  , -1.02909514],
       [ 1.29858404,  0.62248331, -1.07620399],
       [ 1.83837481,  0.90952504, -1.44492936]]), array([[-5.31237668],
       [-0.08146868],
       [ 1.4177473 ],
       [ 1.77192302]])]
[array([[ 0.26065202,  0.12895171, -0.20813425],
       [ 1.35893087,  0.666569  , -1.02909514],
       [ 1.29858404,  0.62248331, -1.07620399],
       [ 1.83837481,  0.90952504, -1.44492936]]), array([[-5.31237668],
       [-0.08146868],
       [ 1.4177473 ],
       [ 1.77192302]])]