In [6]:
# Decode the identity function f(x) = x from a population of rate neurons.
import syde556

a = syde556.Ensemble(neurons=50, dimensions=1)

x = numpy.linspace(-1, 1, 100)
ideal = x  # target function: f(x) = x

# Solve for linear decoders; noise=0.2 is passed to the decoder solver
# (presumably regularization for noise as a fraction of max rate —
# confirm against syde556.compute_decoder).
d = a.compute_decoder(x, ideal, noise=0.2)
A, xhat = a.simulate_rate(x, d)

# print() with a single formatted argument works identically under
# Python 2 and 3 (the original `print 'RMSE:', ...` statement is a
# syntax error in Python 3).
print('RMSE: %g' % numpy.sqrt(numpy.mean((ideal - xhat)**2)))

subplot(2, 1, 1)
plot(x, ideal, color='k')  # ideal function
plot(x, xhat, color='b')   # decoded estimate
subplot(2, 1, 2)
plot(x, A)                 # tuning curves
show()
$f(x)=x^2$
In [19]:
# Decode the quadratic f(x) = x^2 from the same kind of population.
import syde556

a = syde556.Ensemble(neurons=50, dimensions=1)

x = numpy.linspace(-1, 1, 100)
ideal = x**2  # target function: f(x) = x^2

# noise=0.2: decoder-solver noise parameter (see syde556.compute_decoder).
d = a.compute_decoder(x, ideal, noise=0.2)
A, xhat = a.simulate_rate(x, d)

# Python-2/3 compatible print (the original print statement breaks Py3).
print('RMSE: %g' % numpy.sqrt(numpy.mean((ideal - xhat)**2)))

subplot(2, 1, 1)
plot(x, ideal, color='k')  # ideal function
plot(x, xhat, color='b')   # decoded estimate
subplot(2, 1, 2)
plot(x, A)                 # tuning curves
show()
What about other exponents? Other polynomials? Other functions entirely?
$f(x)=e^{- (x-b)^2/(2c^2)}$
In [39]:
# Decode a Gaussian bump f(x) = exp(-(x-b)^2 / (2 c^2)).
import syde556

a = syde556.Ensemble(neurons=100, dimensions=1)

x = numpy.linspace(-1, 1, 100)
c = 0.5  # width of the Gaussian
b = 0    # center of the Gaussian
ideal = numpy.exp(-(x - b)**2 / (2 * c**2))

d = a.compute_decoder(x, ideal, noise=0.2)
A, xhat = a.simulate_rate(x, d)

# Python-2/3 compatible print (the original print statement breaks Py3).
print('RMSE: %g' % numpy.sqrt(numpy.mean((ideal - xhat)**2)))

subplot(2, 1, 1)
plot(x, ideal, color='k')  # ideal function
plot(x, xhat, color='b')   # decoded estimate
subplot(2, 1, 2)
plot(x, A)                 # tuning curves
show()
In [95]:
# SVD of the population activity: find the basis functions the population
# represents best, ordered by singular value.
import syde556

a = syde556.Ensemble(neurons=300, dimensions=1, seed=0)

x = numpy.linspace(-1, 1, 100)
ideal = x
d = a.compute_decoder(x, ideal, noise=0.2)
A, xhat = a.simulate_rate(x, d)

# Gamma = A^T A: correlation matrix of the tuning curves over the sample points.
Gamma = numpy.dot(A.T, A)
U, S, V = numpy.linalg.svd(Gamma)

# Project activities onto the singular vectors: chi[:, i] is the i-th
# basis function evaluated over x.
chi = numpy.dot(A, U)

for i in range(5):
    # Raw string: '\c' in '$\chi...' is an invalid escape sequence in a
    # plain string literal (SyntaxWarning in modern Python).
    plot(x, chi[:, i], label=r'$\chi_%d$=%1.3g' % (i, S[i]), linewidth=3)
legend(loc='best', bbox_to_anchor=(1, 1))
show()
In [97]:
# Side-by-side comparison: the basis functions extracted from the
# population (left) against the first five Legendre polynomials (right).
identity5 = numpy.eye(5)

subplot(1, 2, 1)
for k in range(5):
    plot(x, chi[:, k], linewidth=3)

subplot(1, 2, 2)
for k in range(5):
    # Row k of the identity selects the k-th Legendre polynomial
    # as the coefficient vector for legval.
    plot(x, numpy.polynomial.legendre.legval(x, identity5[k]))
show()
In [92]:
# Singular-value spectrum from the SVD above: how strongly each basis
# function is represented by the population.  Labeled so the figure
# stands alone when skimmed.
plot(S)
xlabel('basis function index')
ylabel('singular value')
show()
In [40]:
# Same SVD analysis, but with all intercepts restricted to (-0.3, 0.3),
# to see how the intercept distribution changes the available basis.
import syde556

a = syde556.Ensemble(neurons=100, dimensions=1, seed=0,
                     intercept=(-0.3, 0.3))
x = numpy.linspace(-1, 1, 100)
ideal = x
d = a.compute_decoder(x, ideal, noise=0.2)
A, xhat = a.simulate_rate(x, d)

Gamma = numpy.dot(A.T, A)
U, S, V = numpy.linalg.svd(Gamma)
chi = numpy.dot(A, U)  # basis functions over x

figure()
plot(x, A)  # tuning curves
figure()
for i in range(6):
    # Raw string avoids the invalid '\c' escape in the plain literal.
    plot(x, chi[:, i], label=r'$\chi_%d$=%1.3g' % (i, S[i]), linewidth=3)
legend(loc='best', bbox_to_anchor=(1, 1))
show()
In [36]:
# Two-dimensional ensemble: extract the basis functions over the 2D state
# space and surface-plot one of them.
import syde556

a = syde556.Ensemble(neurons=300, dimensions=2, seed=0)

# 50x50 grid of sample points covering [-1, 1]^2.
x0 = numpy.linspace(-1, 1, 50)
x1 = numpy.linspace(-1, 1, 50)
# meshgrid already returns arrays; the original numpy.array(...) wrapper
# around it was redundant.
x0, x1 = numpy.meshgrid(x0, x1)
x = numpy.array([numpy.ravel(x0), numpy.ravel(x1)])  # (2, 2500) sample matrix

d = a.compute_decoder(x, x, noise=0.2)  # decode the identity in 2D
A, xhat = a.simulate_rate(x, d)

Gamma = numpy.dot(A.T, A)
U, S, V = numpy.linalg.svd(Gamma)
chi = numpy.dot(A, U)  # basis functions, one per column

index = 0  # which basis function to display
# reshape() instead of assigning .shape: avoids mutating a view of chi
# in place, keeping the cell idempotent on re-run.
basis = chi[:, index].reshape(50, 50)

from mpl_toolkits.mplot3d.axes3d import Axes3D
fig = pylab.figure()
ax = fig.add_subplot(1, 1, 1, projection='3d')
p = ax.plot_surface(x0, x1, basis, linewidth=0, cstride=1, rstride=1,
                    cmap=pylab.cm.jet)
pylab.title('Basis #%d (sv=%1.4g)' % (index, S[index]))
show()
In higher dimensions, higher-order functions are even harder to decode accurately.
All of this is also affected by the distribution of encoders (as well as by the distributions of intercepts and maximum firing rates).
In [ ]: