In [4]:
%pylab inline
import nengo
from nengo.utils.ensemble import tuning_curves

model = nengo.Network()
with model:
    a = nengo.Ensemble(100, dimensions=1, radius=3)
    conn = nengo.Connection(a, a)   # recurrent connection; Nengo solves for its decoders
sim = nengo.Simulator(model)

x, A = tuning_curves(a, sim)        # evaluation points and firing rates of the ensemble
plot(x, A);

d = sim.data[conn].weights.T        # decoders solved for the connection
xhat = numpy.dot(A, d)              # decoded estimate of x from the tuning curves
figure()
plot(xhat)
Out[4]:
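A quick follow-up check (a minimal sketch, reusing x, A, and d from the cell above, not part of the original run) is to plot the decoded estimate against the evaluation points instead of the sample index and report an RMSE:

# Sketch: compare the decoded estimate to the identity (assumes x, A, d from the cell above)
xhat = np.dot(A, d)
figure()
plot(x, x, 'k--', label='ideal')
plot(x, xhat, label=r'decoded $\hat{x}$')
legend()
rmse = np.sqrt(np.mean((x - xhat.ravel())**2))
print(rmse)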
In [8]:
%pylab inline
In [18]:
import matplotlib.pyplot as plt
import numpy as np

#%% PART a: distortion and noise error vs. number of neurons, sigma = 0.1
N = np.array([4, 8, 16, 32, 64])   # numbers of neurons (longer list left commented out: 64, 128, 256, 512, 1024, 2048)
samples = int(2 / 0.01)            # number of x sample points over [-1, 1]
runs = 10                          # number of runs to average over

Edist = np.zeros([N.size, runs])
Enoise = np.zeros(Edist.shape)

for i in range(N.size):
    for j in range(runs):
        rate = np.random.uniform(low=100, high=200, size=N[i])     # maximum firing rates
        xint = np.random.uniform(low=-0.95, high=0.95, size=N[i])  # x-intercepts
        alpha = rate / (1 - xint)                                   # gains
        Jbias = rate - alpha                                        # bias currents
        e = np.random.randint(low=0, high=2, size=N[i]) * 2 - 1    # encoders, +1 or -1

        x = np.linspace(-1, 1, samples)
        J = np.zeros([N[i], np.size(x)])
        for k in range(N[i]):
            J[k] = alpha[k] * e[k] * x + Jbias[k]
        a = np.maximum(J, 0)         # rectified linear neuron model

        sigma = 0.1 * np.max(a)      # noise standard deviation
        gamma = np.dot(a, a.transpose()) / samples
        gamma = gamma + np.random.normal(scale=sigma, size=gamma.shape)  # noise added to the Gram matrix
        upsilon = np.dot(a, x) / samples
        d = np.dot(np.linalg.pinv(gamma), upsilon)                  # decoders

        Edist[i, j] = np.sum((x - np.dot(d, a))**2) / (samples * 2)  # distortion error
        Enoise[i, j] = sigma**2 * np.sum(d**2)                       # noise error

plt.figure()
plt.loglog(N, 1. / (N * N), 'k--')
plt.loglog(N, np.average(Edist, axis=1))
plt.title(r'Distortion Error $\sigma$ = 0.1')
plt.xlabel('Number of Neurons')
plt.ylabel('Error')
plt.legend(['$N^{-2}$', '$E_{Dist}$'])

plt.figure()
plt.loglog(N, 1. / N, 'k--')
plt.loglog(N, np.average(Enoise, axis=1))
plt.title(r'Noise Error $\sigma$ = 0.1')
plt.xlabel('Number of Neurons')
plt.ylabel('Error')
plt.legend(['$N^{-1}$', '$E_{Noise}$'])
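To make the comparison with the reference lines explicit, the log-log slopes of the averaged errors can be fit directly; this is a minimal sketch assuming N, Edist, and Enoise from the cell above, not part of the original run:

# Sketch: fit log-log slopes of the averaged errors (assumes N, Edist, Enoise from above)
slope_dist, _ = np.polyfit(np.log(N), np.log(np.average(Edist, axis=1)), 1)
slope_noise, _ = np.polyfit(np.log(N), np.log(np.average(Enoise, axis=1)), 1)
print('distortion error slope:', slope_dist)   # roughly -2 would match the N^{-2} line
print('noise error slope:', slope_noise)       # roughly -1 would match the N^{-1} line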
In [16]:
Enoise
Out[16]:
In [5]:
1./(N*N)
Out[5]:
In [12]:
%pylab inline
# Hand-entered error values, compared against an N^{-1/2} reference curve
y = np.array([0.505495628058,
              0.330387457083,
              0.304713762529,
              0.209663572096,
              0.185968290356,
              0.163557041435])
x = 2**np.arange(3, 9)   # N = 8, 16, 32, 64, 128, 256
loglog(x, y)
loglog(x, 1. / np.sqrt(x))
Out[12]:
In [17]:
t = linspace(0, 1, 1000)
tau = 0.005
dt = .001
h = np.exp(-t / tau)        # exponential post-synaptic filter
plot(t, h)
print(np.sum(h))
r = h / np.sum(h)           # normalize so the filter sums to 1
plot(t, r)
np.sum(r) * dt              # area under r given the timestep dt
Out[17]:
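The normalized filter r can be applied to a signal by discrete convolution. Below is a minimal sketch under the assumption of a hypothetical step input u, reusing t and r from the cell above:

# Sketch: filter a hypothetical step input with r (assumes t, r from the cell above)
u = np.zeros_like(t)
u[t >= 0.2] = 1.0                   # illustrative step at t = 0.2 s
y = np.convolve(u, r)[:len(t)]      # discrete convolution, truncated to len(t)
figure()
plot(t, u, label='input u')
plot(t, y, label='filtered output')
legend()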
In [24]:
sigma = 20
gamma = np.random.rand(5, 5)
np.random.normal(scale=sigma, size=gamma.shape) * np.eye(np.size(gamma, 0))  # random noise kept on the diagonal only (result not stored)
np.eye(np.size(gamma, 0)) * sigma   # constant sigma on the diagonal
Out[24]:
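The scratch expressions above put sigma on the diagonal of a matrix the same shape as gamma. A common alternative to adding noise across the whole Gram matrix (as in the earlier loop) is to regularize only its diagonal with the noise variance; the sketch below is illustrative only, with placeholder activities, and is not the method used above:

# Sketch: diagonal regularization of gamma (illustrative shapes, placeholder activities)
n_neurons, n_samples = 5, 200
a = np.maximum(np.random.randn(n_neurons, n_samples), 0)   # placeholder rectified activities
x = np.linspace(-1, 1, n_samples)
sigma = 0.1 * np.max(a)
gamma = np.dot(a, a.T) / n_samples + sigma**2 * np.eye(n_neurons)  # noise variance on the diagonal only
upsilon = np.dot(a, x) / n_samples
d = np.linalg.solve(gamma, upsilon)    # decoders from the regularized system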
In [23]:
np.size(gamma, 0)   # number of rows of gamma
Out[23]: