Accompanying Readings: Chapter 8
Terry Stewart
In [ ]:
import nef
net = nef.Network('Recurrent Test 1')
net.make('A', neurons=1000, dimensions=1)
def feedback(x):
    return x[0] + 1
net.connect('A', 'A', func=feedback, pstc=0.1)
net.add_to_nengo()
net.view()
What are the precise dynamics?
What about $f(x)=-x$?
In [ ]:
import nef
net = nef.Network('Recurrent Test 1')
net.make('A', neurons=100, dimensions=1)
def feedback(x):
    return -x[0]
net.connect('A', 'A', func=feedback)
net.add_to_nengo()
net.view()
In [ ]:
import nef
net = nef.Network('Recurrent Test 1')
net.make('A', neurons=100, dimensions=1)
def feedback(x):
    return -x[0]
net.connect('A', 'A', func=feedback)
net.make_input('input', [0])
net.connect('input', 'A')
net.add_to_nengo()
net.view()
Does this make sense?
What about $f(x)=x^2$?
In [ ]:
import nef
net = nef.Network('Recurrent Test 1')
net.make('A', neurons=100, dimensions=1)
def feedback(x):
    return x[0]*x[0]
net.connect('A', 'A', func=feedback)
net.make_input('input', [0])
net.connect('input', 'A')
net.add_to_nengo()
net.view()
In [18]:
import numpy
import syde556
dt = 0.001
t = numpy.arange(1000)*dt
x = numpy.zeros(1000)
x[300:]=1
A = syde556.Ensemble(neurons=80, dimensions=1, max_rate=(50,100))
decoder = A.compute_decoder(x, x, noise=0.2)
spikes_A, xhat = A.simulate_spikes(x, decoder, tau=0.1)
import pylab
pylab.plot(t, x, label='$x$')
pylab.plot(t, xhat, label='$\hat{x}$')
pylab.legend(loc='best')
pylab.ylim(-0.2, 1.2)
pylab.show()
In [20]:
import numpy
import syde556
dt = 0.001
t = numpy.arange(1000)*dt
x = numpy.zeros(1000)
x[300:]=1
A = syde556.Ensemble(neurons=80, dimensions=1, max_rate=(50,100))
decoder = A.compute_decoder(x, x, noise=0.2)
spikes_A, xhat = A.simulate_spikes(x, decoder, tau=0.03)
import pylab
pylab.plot(t, x, label='$x$')
pylab.plot(t, xhat, label='$\hat{x}$')
pylab.legend(loc='best')
pylab.ylim(-0.2, 1.2)
pylab.show()
In [28]:
import numpy
import syde556
dt = 0.001
t = numpy.arange(1000)*dt
x = numpy.zeros(1000)
x[300:]=1
h = syde556.psc_filter(t-0.5, 0.1)
fx = syde556.apply_filter(numpy.array(x)[:,None], h)*dt
A = syde556.Ensemble(neurons=80, dimensions=1, max_rate=(50,100))
decoder = A.compute_decoder(x, x, noise=0.2)
spikes_A, xhat = A.simulate_spikes(x, decoder, tau=0.1)
import pylab
pylab.plot(t, x, label='$x(t)$')
pylab.plot(t, xhat, label='$\hat{x}(t)$')
pylab.plot(t, fx, label='$x(t)*h(t)$')
pylab.legend(loc='lower right')
pylab.ylim(-0.2, 1.2)
pylab.show()
For a recurrent connection, the represented value is the decoded feedback passed through the synaptic filter:
$x(t) = f(x(t))*h(t)$
where $$ h(t) = \begin{cases} e^{-t/\tau} &\mbox{if } t > 0 \\ 0 &\mbox{otherwise} \end{cases} $$
How can we work with this?
General rule of thumb: convolutions are annoying, so let's get rid of them. Taking the Laplace transform turns the convolution into a multiplication, and the Laplace transform of $h(t)$ (normalized to unit area) is ${1 \over 1+s\tau}$:
$X=F {1 \over {1+s\tau}}$
$X(1+s\tau) = F$
$X + Xs\tau = F$
$sX = {1 \over \tau} (F-X)$
${dx \over dt} = {1 \over \tau} (f(x)-x)$
This says that if we make a recurrent connection computing $f(x)$, we actually end up implementing a differential equation.
So what happened with $f(x)=x+1$? There, ${dx \over dt}={1 \over \tau}(x+1-x)={1 \over \tau}$, so $x$ ramps upward at a constant rate (until the neurons saturate at the edge of their representable range).
And $f(x)=x^2$? ${dx \over dt}={1 \over \tau}(x^2-x)$, which has a stable fixed point at $x=0$ and an unstable one at $x=1$: start below 1 and $x$ falls to 0; start above 1 and it runs away.
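These answers are easy to check without neurons by Euler-integrating ${dx \over dt}={1 \over \tau}(f(x)-x)$ directly (a minimal sketch; simulate is a made-up helper name, not part of nef or syde556):
In [ ]:
import numpy

tau, dt, steps = 0.1, 0.001, 2000

def simulate(f, x0):
    # Euler-integrate dx/dt = (f(x) - x)/tau
    x = numpy.zeros(steps)
    x[0] = x0
    for i in range(1, steps):
        x[i] = x[i-1] + dt*(f(x[i-1]) - x[i-1])/tau
    return x

import pylab
t = numpy.arange(steps)*dt
pylab.plot(t, simulate(lambda x: x + 1, 0.0), label='$f(x)=x+1$')
pylab.plot(t, simulate(lambda x: -x, 1.0), label='$f(x)=-x$')
pylab.plot(t, simulate(lambda x: x*x, 0.9), label='$f(x)=x^2$')
pylab.legend(loc='best')
pylab.ylim(-0.5, 2)  # the ramp keeps growing; zoom in on the fixed points
pylab.show()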
What if there's some differential equation we really want to implement?
Both the recurrent connection and the input connection are filtered by $h(t)$, so if the recurrent connection computes $f'(x)$ and the input connection computes $g'(u)$, the same derivation gives
${dx \over dt} = {1 \over \tau} (f'(x)-x+g'(u))$
Matching this to a desired ${dx \over dt}=f(x)+g(u)$ means choosing $f'(x)=\tau f(x)+x$ and $g'(u)=\tau g(u)$.
So if you have some input that you want added to ${dx \over dt}$, you need to scale it by $\tau$.
This lets us do any differential equation of the form ${dx \over dt}=f(x)+g(u)$
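In code, the recipe is mechanical enough to wrap in a helper (a sketch; make_feedback is a made-up name, not part of the nef API):
In [ ]:
def make_feedback(f, tau):
    # turn a desired dx/dt = f(x) into the function the recurrent
    # connection should compute: f'(x) = tau*f(x) + x
    def feedback(x):
        fx = f(x)
        return [tau*fx[i] + x[i] for i in range(len(x))]
    return feedback

# the matching input connection just needs its transform scaled by tau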
For example, the integrator: ${dp \over dt}=v$
So, to get it in the standard form ${dx \over dt}=f(x)+g(u)$ we have $f(p)=0$ and $g(v)=v$, giving a recurrent function $f'(p)=\tau \cdot 0 + p = p$ and an input connection scaled by $\tau$:
In [ ]:
import nef
tau=0.1
net = nef.Network('Neural Integrator')
net.make('velocity', neurons=100, dimensions=1)
net.make('position', neurons=100, dimensions=1)
def recurrent(x):
    return x[0]  # f'(p) = tau*0 + p = p
net.connect('position', 'position', func=recurrent, pstc=tau)
net.connect('velocity', 'position', transform=[[tau]])
net.make_input('input', [0])
net.connect('input', 'velocity')
net.add_to_nengo()
net.view()
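A quick non-neural check that this recipe really integrates: Euler-simulating the synapse dynamics ${dx \over dt}={1 \over \tau}(f'(x)-x+g'(v))$ with $f'(x)=x$ and $g'(v)=\tau v$ collapses to ${dp \over dt}=v$ (a sketch; all names made up):
In [ ]:
import numpy

tau, dt, steps = 0.1, 0.001, 2000

v = numpy.zeros(steps)
v[300:800] = 1.0  # a 0.5 s pulse of velocity

p = numpy.zeros(steps)
for i in range(1, steps):
    # dp/dt = (f'(p) - p + tau*v)/tau = v
    p[i] = p[i-1] + dt*(p[i-1] - p[i-1] + tau*v[i-1])/tau

import pylab
t = numpy.arange(steps)*dt
pylab.plot(t, v, label='$v$')
pylab.plot(t, p, label='$p$')
pylab.legend(loc='best')
pylab.show()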
In [6]:
import numpy
import syde556
reload(syde556)
x = numpy.linspace(-1, 1, 100)
A = syde556.Ensemble(neurons=200, dimensions=1, seed=1, tau_rc=0.02)
decoder = A.compute_decoder(x, x, noise=0.1)
activity, xhat = A.simulate_rate(x, decoder)
import pylab
pylab.axhline(0, color='k')
pylab.plot(x, x-xhat, label='$x-\hat{x}$')
pylab.legend(loc='best')
pylab.xlabel('$x$')
pylab.show()
What if we want the integrator to gradually forget, drifting back to centre (a leaky integrator)?
${dp \over dt}=v - {p \over \tau_c}$
where $\tau_c$ is the time constant of that drift back to centre. Applying the recipe, the recurrent function is $f'(p)=\tau \left({-p \over \tau_c}\right)+p=\left(1-{\tau \over \tau_c}\right)p$:
In [ ]:
import nef
tau=0.1
tau_centre=1.0
net = nef.Network('Neural Integrator')
net.make('velocity', neurons=100, dimensions=1)
net.make('position', neurons=100, dimensions=1)
def recurrent(x):
    # f'(p) = tau*(-p/tau_c) + p = (1 - tau/tau_c)*p
    return (1 - tau/tau_centre)*x[0]
net.connect('position', 'position', func=recurrent, pstc=tau)
net.connect('velocity', 'position', transform=[[tau]])
net.make_input('input', [0])
net.connect('input', 'velocity')
net.add_to_nengo()
net.view()
What about an integrator whose decay rate is itself an input?
${dp \over dt}= v - p d$
So there are two inputs: $v$ and $d$.
Can we write this as ${dx \over dt}=f(x)+g(u)$? Yes, by making the state two-dimensional, $x=[p, d]$. The first dimension needs $f'_0(x)=\tau(-x_0 x_1)+x_0$ with input $\tau v$; the second dimension just stores the control signal, with feedback $0$, so it follows its input $d$ with time constant $\tau$:
In [ ]:
import nef
tau=0.1
tau_centre=1.0
net = nef.Network('Controlled Neural Integrator')
net.make('decay', neurons=100, dimensions=1)
net.make('velocity', neurons=100, dimensions=1)
net.make('position', neurons=200, dimensions=2)
def recurrent(x):
    # x = [p, d]; f'_0 = tau*(-p*d) + p, and dimension 1 just stores d
    return [x[0] - tau*x[1]*x[0], 0]
net.connect('position', 'position', func=recurrent, pstc=tau)
net.connect('velocity', 'position', transform=[[tau], [0]])
net.connect('decay', 'position', transform=[[0], [1]])
net.make_input('input', [0])
net.connect('input', 'velocity')
net.make_input('input_decay', [0])
net.connect('input_decay', 'decay')
net.add_to_nengo()
net.view()
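What this should look like ideally: Euler-integrating ${dp \over dt}=v-pd$ directly, with no neurons (a sketch; all names made up):
In [ ]:
import numpy

dt, steps = 0.001, 3000

def ideal(d):
    v = numpy.zeros(steps)
    v[300:800] = 1.0  # a 0.5 s pulse of velocity
    p = numpy.zeros(steps)
    for i in range(1, steps):
        p[i] = p[i-1] + dt*(v[i-1] - p[i-1]*d)
    return p

import pylab
t = numpy.arange(steps)*dt
pylab.plot(t, ideal(0.0), label='$d=0$ (pure integrator)')
pylab.plot(t, ideal(2.0), label='$d=2$ (stored value leaks away)')
pylab.legend(loc='best')
pylab.show()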
In [ ]:
import nef
net = nef.Network('Linear Oscillator')
net.make('A', neurons=100, dimensions=2)
def feedback(x):
    # f'(x) = tau*omega*[x1, -x0] + x, with tau*omega = 1
    # (so omega = 1/pstc = 100 rad/s)
    return x[0] + x[1], -x[0] + x[1]
net.connect('A', 'A', func=feedback, pstc=0.01)
net.make_input('input', [0])
net.connect('input', 'A')
net.add_to_nengo()
net.view()
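With $\tau=0.01$, this feedback implements ${dx \over dt}={1 \over \tau}[x_1, -x_0]$: a harmonic oscillator at $\omega=1/\tau=100$ rad/s. A minimal pure-numpy check of those ideal dynamics (a sketch; simple Euler, so the amplitude drifts slightly outward):
In [ ]:
import numpy

tau = 0.01
dt = 0.0001
steps = 2000  # 0.2 s, roughly three cycles at 100 rad/s

x = numpy.zeros((steps, 2))
x[0] = [1.0, 0.0]
for i in range(1, steps):
    dx = numpy.array([x[i-1, 1], -x[i-1, 0]]) / tau
    x[i] = x[i-1] + dt*dx

import pylab
pylab.plot(x[:, 0], x[:, 1])  # phase plane: should trace out a circle
pylab.axis('equal')
pylab.show()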
Lorenz Attractor
In [ ]:
tau = 0.1
sigma = 10
beta = 8.0/3
rho = 28
import nef
net = nef.Network('Lorenz attractor', seed=6)
net.make('A', 2000, 3, radius=60)
def feedback(x):
    # Lorenz equations, with the third dimension shifted
    # so the represented state stays closer to the origin
    dx0 = -sigma * x[0] + sigma * x[1]
    dx1 = -x[0] * x[2] - x[1]
    dx2 = x[0] * x[1] - beta * (x[2] + rho) - rho
    # standard form: f'(x) = tau*f(x) + x
    return [dx0 * tau + x[0],
            dx1 * tau + x[1],
            dx2 * tau + x[2]]
net.connect('A', 'A', func=feedback, pstc=tau)
net.add_to_nengo()
net.view()
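For reference, the same equations can be Euler-integrated directly in numpy, with no neurons, to see what the network is approximating (a sketch):
In [ ]:
import numpy

sigma, beta, rho = 10, 8.0/3, 28
dt, steps = 0.001, 20000

x = numpy.zeros((steps, 3))
x[0] = [1.0, 1.0, 1.0]
for i in range(1, steps):
    x0, x1, x2 = x[i-1]
    dx = numpy.array([-sigma*x0 + sigma*x1,
                      -x0*x2 - x1,
                      x0*x1 - beta*(x2 + rho) - rho])
    x[i] = x[i-1] + dt*dx

import pylab
pylab.plot(x[:, 0], x[:, 2])  # the familiar butterfly, shifted toward the origin
pylab.show()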
In [ ]:
import nef
net = nef.Network('Square Oscillator')
net.make('A', neurons=1000, dimensions=2)
tau = 0.02
r=4
def feedback(x):
    # move at a constant speed r around a square path: whichever
    # coordinate is larger in magnitude determines which side we're on
    if abs(x[1]) > abs(x[0]):
        if x[1] > 0: dx = [r, 0]
        else:        dx = [-r, 0]
    else:
        if x[0] > 0: dx = [0, -r]
        else:        dx = [0, r]
    # standard form: f'(x) = tau*f(x) + x
    return [tau*dx[0] + x[0], tau*dx[1] + x[1]]
net.connect('A', 'A', func=feedback, pstc=tau)
net.make_input('input', [0,0])
net.connect('input', 'A')
net.add_to_nengo()
net.view()
What about tracing out other shapes (like a heart)?
In [ ]:
import nef
net = nef.Network('Heart Oscillator')
net.make('A', neurons=1000, dimensions=2)
tau = 0.02
r=4
def feedback(x):
    # simple harmonic oscillator: f'(x) = tau*r*[-x1, x0] + x (omega = r rad/s)
    return [-tau*r*x[1] + x[0], tau*r*x[0] + x[1]]
net.connect('A', 'A', func=feedback, pstc=tau)
from math import atan2, sin, cos, sqrt
def heart(x):
    # map the circular oscillation in A onto a heart-shaped curve
    theta = atan2(x[1], x[0])
    r = 2 - 2*sin(theta) + sin(theta)*sqrt(abs(cos(theta)))/(sin(theta) + 1.4)
    return [-r*cos(theta), r*sin(theta)]
net.make('B', neurons=10, dimensions=2)
net.connect('A', 'B', func=heart, pstc=tau)
net.make_input('input', [0,0])
net.connect('input', 'A')
net.add_to_nengo()
net.view()
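To see what B should decode, the heart curve itself can be plotted directly (a sketch):
In [ ]:
import numpy

theta = numpy.linspace(-numpy.pi, numpy.pi, 500)
r = 2 - 2*numpy.sin(theta) + numpy.sin(theta)*numpy.sqrt(numpy.abs(numpy.cos(theta)))/(numpy.sin(theta) + 1.4)

import pylab
pylab.plot(-r*numpy.cos(theta), r*numpy.sin(theta))
pylab.axis('equal')
pylab.show()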
In [ ]:
import nef
net = nef.Network('Controlled Oscillator')
net.make('A', neurons=500, dimensions=3, radius=1.7)
tau = 0.1
scale = 10
def feedback(x):
    # x[2] sets the frequency (omega ~ scale*x[2] rad/s); the 1.1 gain
    # gives a slight push outward so the oscillation doesn't die away
    return x[1]*x[2]*scale*tau + 1.1*x[0], -x[0]*x[2]*scale*tau + 1.1*x[1], 0
net.connect('A', 'A', func=feedback, pstc=tau)
net.make_input('input', [0,0,0])
net.connect('input', 'A')
net.add_to_nengo()
net.view()
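Ideally, the oscillation frequency scales linearly with the third dimension. A quick non-neural check of ${dx \over dt}=scale \cdot x_2 [x_1, -x_0]$ (a sketch; simple Euler, so the amplitude drifts slightly):
In [ ]:
import numpy

scale = 10
dt, steps = 0.001, 5000

def oscillate(x2):
    # integrate dx/dt = scale*x2*[x1, -x0] with x2 held fixed
    x = numpy.array([1.0, 0.0])
    trace = numpy.zeros(steps)
    for i in range(steps):
        trace[i] = x[0]
        x = x + dt*scale*x2*numpy.array([x[1], -x[0]])
    return trace

import pylab
t = numpy.arange(steps)*dt
pylab.plot(t, oscillate(0.5), label='$x_2=0.5$')
pylab.plot(t, oscillate(1.0), label='$x_2=1.0$')
pylab.legend(loc='best')
pylab.show()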