In [5]:
# -*- coding: utf-8 -*-
""" 
Example of use multi-layer perceptron
=====================================

Task: Approximation function: 1/2 * sin(x)

"""

import neurolab as nl
import numpy as np

# Create train samples
x = np.linspace(-7, 7, 20)
y = np.sin(x) * 0.5

size = len(x)

inp = x.reshape(size, 1)
tar = y.reshape(size, 1)

# Create a network with 2 layers (5 hidden neurons, 1 output neuron)
# and randomly initialized weights; input values lie in [-7, 7]
net = nl.net.newff([[-7, 7]], [5, 1])

# Train network: stop after 500 epochs or when the SSE error drops below 0.02,
# printing progress every 100 epochs
error = net.train(inp, tar, epochs=500, show=100, goal=0.02)

# Simulate network
out = net.sim(inp)

# Plot results
import matplotlib.pyplot as pl
pl.subplot(211)
pl.plot(error)
pl.xlabel('Epoch number')
pl.ylabel('Error (default SSE)')

x2 = np.linspace(-6.0, 6.0, 150)
y2 = net.sim(x2.reshape(x2.size, 1)).reshape(x2.size)

y3 = out.reshape(size)

pl.subplot(212)
pl.plot(x2, y2, '-', x, y, '.', x, y3, 'p')
pl.legend(['net output', 'train target', 'net output on train points'])
pl.show()


The goal of learning is reached
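
As a quick numeric check beyond the plots, the trained net can be scored on a
held-out grid with one of neurolab's error classes. A minimal sketch, assuming
nl.error.MSE (neurolab's mean-squared-error functor, called as f(target, output));
the test grid here is made up for illustration and was not part of the original run:

In [ ]:
# Score the trained net on points it was not trained on (hypothetical grid)
x_test = np.linspace(-7, 7, 100)
t_test = 0.5 * np.sin(x_test)              # true target values
y_test = net.sim(x_test.reshape(-1, 1))    # net predictions, shape (100, 1)

f = nl.error.MSE()                         # mean-squared-error functor
print(f(t_test.reshape(-1, 1), y_test))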

In [6]:
print(inp)


[[-7.        ]
 [-6.26315789]
 [-5.52631579]
 [-4.78947368]
 [-4.05263158]
 [-3.31578947]
 [-2.57894737]
 [-1.84210526]
 [-1.10526316]
 [-0.36842105]
 [ 0.36842105]
 [ 1.10526316]
 [ 1.84210526]
 [ 2.57894737]
 [ 3.31578947]
 [ 4.05263158]
 [ 4.78947368]
 [ 5.52631579]
 [ 6.26315789]
 [ 7.        ]]
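
Once trained, the network can also be persisted and restored. A minimal sketch,
assuming neurolab's net.save() / nl.load() pickle helpers; the file name
'sin_approx.net' is arbitrary:

In [ ]:
# Save the trained network to disk and reload it as a working copy
net.save('sin_approx.net')
net2 = nl.load('sin_approx.net')

# The restored net should reproduce the original outputs exactly
print(np.allclose(net2.sim(inp), net.sim(inp)))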
