In [ ]:


In [4]:
%pylab inline
import nengo
from nengo.utils.ensemble import tuning_curves

model = nengo.Network()

with model:
    a = nengo.Ensemble(100, dimensions=1, radius=3)
    conn = nengo.Connection(a,a)
    
sim = nengo.Simulator(model)

x, A = tuning_curves(a, sim)

plot(x,A);

d = sim.data[conn].weights.T

xhat = numpy.dot(A, d)

figure()
plot(xhat)


Populating the interactive namespace from numpy and matplotlib
Out[4]:
[<matplotlib.lines.Line2D at 0x10bc60650>]

In [8]:
%pylab inline


Populating the interactive namespace from numpy and matplotlib
WARNING: pylab import has clobbered these variables: ['rate', 'e', 'gamma']
`%matplotlib` prevents importing * from pylab and numpy

In [18]:
import matplotlib.pyplot as plt
import numpy as np
#%% PART a Error correlation sigma = 0.1
N = np.array([4,8,16,32,64])#64,128,256,512,1024, 2048]) #number of neurons
samples = 2/0.01 #how many x values
runs = 10 #number of runs over which to average

Edist = np.zeros([N.size,runs])
Enoise = np.zeros(Edist.shape)
for i in range(N.size):
    for j in range(runs):
        rate = np.random.uniform(low = 100, high = 200, size = N[i]) #max fire rate
        xint = np.random.uniform(low = -0.95, high = 0.95, size = N[i]) #intercept
        alpha = rate/(1-xint)
        Jbias = rate - alpha
        e = np.random.randint(low = 0, high = 2, size = N[i]) * 2 - 1 #encoder preference
        x = np.linspace(-1,1,samples)
        J = np.zeros([N[i],np.size(x)]) #initialize 
        for k in range(N[i]):
            J[k] = alpha[k]*e[k]*x + Jbias[k]
            
        a = np.maximum(J,0) #Rectified Linear model   
        sigma = 0.1 *np.max(a) #noise distribution
        gamma = np.dot(a,a.transpose())/samples
        gamma = gamma + np.random.normal(scale=sigma, size=gamma.shape)
        upsilon = np.dot(a,x)/samples
        d = np.dot(np.linalg.pinv(gamma),upsilon)
        Edist[i,j] = (np.sum(x-np.dot(d,a))**2)/(samples*2)
        Enoise[i,j] = sigma**2*np.sum(d**2)
        
plt.figure()
plt.loglog(N,1./(N*N),'k--')
plt.loglog(N,np.average(Edist,1))
plt.title('Distortion Error $\sigma$ = 0.1')
plt.xlabel('Number of Neurons')
plt.ylabel('Error')
plt.legend(['$N^{-2}$', '$E_{Dist}$'])

plt.figure()
plt.loglog(N,1./N,'k--')
plt.loglog(N,np.average(Enoise))
plt.title('Noise Error $\sigma$ = 0.1')
plt.xlabel('Number of Neurons')
plt.ylabel('Error')
plt.legend(['$N^{-1}$', '$E_{Noise}$'])


189.735464417
198.463933048
186.551516525
177.916586867
179.431153718
193.759093603
193.5666484
195.558938711
168.906644342
186.801079605
179.050016908
199.593160867
189.217366973
143.878142065
199.790140222
190.42557341
184.717405803
199.824915701
193.063836993
177.945687896
195.400876153
195.796602301
199.928461615
195.396691719
195.537425614
193.347431584
168.148086648
198.750251148
197.061848614
197.687055306
197.559423773
196.837318567
198.07968524
198.167796613
199.413067528
197.522197456
199.732691133
199.694647364
198.022968106
194.297334863
199.586196132
197.46557148
199.664669482
197.792378844
199.064372909
196.426603677
197.889589934
198.221444788
199.549118656
198.883540885
---------------------------------------------------------------------------
ValueError                                Traceback (most recent call last)
<ipython-input-18-aa18eef8364e> in <module>()
     40 plt.figure()
     41 plt.loglog(N,1./N,'k--')
---> 42 plt.loglog(N,np.average(Enoise))
     43 plt.title('Noise Error $\sigma$ = 0.1')
     44 plt.xlabel('Number of Neurons')

/System/Library/Frameworks/Python.framework/Versions/2.7/Extras/lib/python/matplotlib/pyplot.pyc in loglog(*args, **kwargs)
   2908         ax.hold(hold)
   2909     try:
-> 2910         ret = ax.loglog(*args, **kwargs)
   2911         draw_if_interactive()
   2912     finally:

/System/Library/Frameworks/Python.framework/Versions/2.7/Extras/lib/python/matplotlib/axes.pyc in loglog(self, *args, **kwargs)
   4271         b = self._hold
   4272         self._hold = True  # we've already processed the hold
-> 4273         l = self.plot(*args, **kwargs)
   4274         self._hold = b  # restore the hold
   4275 

/System/Library/Frameworks/Python.framework/Versions/2.7/Extras/lib/python/matplotlib/axes.pyc in plot(self, *args, **kwargs)
   4135         lines = []
   4136 
-> 4137         for line in self._get_lines(*args, **kwargs):
   4138             self.add_line(line)
   4139             lines.append(line)

/System/Library/Frameworks/Python.framework/Versions/2.7/Extras/lib/python/matplotlib/axes.pyc in _grab_next_args(self, *args, **kwargs)
    315                 return
    316             if len(remaining) <= 3:
--> 317                 for seg in self._plot_args(remaining, kwargs):
    318                     yield seg
    319                 return

/System/Library/Frameworks/Python.framework/Versions/2.7/Extras/lib/python/matplotlib/axes.pyc in _plot_args(self, tup, kwargs)
    293             x = np.arange(y.shape[0], dtype=float)
    294 
--> 295         x, y = self._xy_from_xy(x, y)
    296 
    297         if self.command == 'plot':

/System/Library/Frameworks/Python.framework/Versions/2.7/Extras/lib/python/matplotlib/axes.pyc in _xy_from_xy(self, x, y)
    235         y = np.atleast_1d(y)
    236         if x.shape[0] != y.shape[0]:
--> 237             raise ValueError("x and y must have same first dimension")
    238         if x.ndim > 2 or y.ndim > 2:
    239             raise ValueError("x and y can be no greater than 2-D")

ValueError: x and y must have same first dimension

In [16]:
Enoise


Out[16]:
array([[  2.45661642e-01,   9.19495273e-02,   2.18758621e-01,
          5.13905234e-01,   3.26938194e-02,   2.45159327e-01,
          2.44540733e-02,   4.35639505e-02,   1.04375653e-01,
          2.95106163e-02],
       [  1.02808001e+03,   2.23005498e-02,   1.77082609e+00,
          6.34070150e-02,   2.28348426e+01,   2.53121695e-02,
          1.69086420e-01,   2.33601610e-02,   1.23221401e+00,
          2.17893806e-02],
       [  3.83669987e-01,   2.74259322e-02,   9.60227458e+04,
          4.72957003e-01,   4.59700316e-02,   2.46383720e+00,
          8.16279565e-02,   2.06214077e-02,   4.48536887e+00,
          8.64070568e+00],
       [  1.11869163e-01,   1.90590353e-02,   4.81711457e-01,
          2.72321946e-02,   5.68661003e-01,   4.40815163e+01,
          2.77808040e-01,   1.76114256e-02,   1.32437000e-02,
          1.85181001e-02],
       [  4.41644666e-02,   1.80450540e+00,   8.08999922e-01,
          8.72530951e-01,   4.20763690e-02,   7.49983706e+01,
          7.32186284e-02,   5.31384059e-01,   1.72000007e-02,
          3.91287622e+00],
       [  2.71521124e-01,   8.17967590e-02,   8.53884274e-01,
          3.80932291e-01,   2.72816223e-01,   5.41931909e-02,
          2.39783008e-01,   5.62519274e-02,   1.75864985e-01,
          9.90386515e-02]])

In [5]:
1./(N*N)


Out[5]:
array([ 0.0625    ,  0.015625  ,  0.00390625,  0.00097656])

In [12]:
%pylab inline
y = np.zeros(6)
y[0]=0.505495628058
y[1]=0.330387457083
y[2]= 0.304713762529
y[3]= 0.209663572096
y[4]= 0.185968290356
y[5]= 0.163557041435
    
x = 2**np.arange(3,9)

loglog(x,y)
loglog(x,1./np.sqrt(x))


Populating the interactive namespace from numpy and matplotlib
Out[12]:
[<matplotlib.lines.Line2D at 0x108e72690>]

In [17]:
t = linspace(0,1,1000)
tau=0.005
dt = .001
h = np.exp(-t/tau)
plot(t,h)

print np.sum(h)

r = h/(np.sum(h))
plot(t,r)
np.sum(r)*dt


5.51167221613
Out[17]:
0.001

In [24]:
sigma = 20
gamma = np.random.rand(5,5)
np.random.normal(scale=sigma, size=gamma.shape)*np.eye(np.size(gamma,0))
np.eye(np.size(gamma,0))*sigma


Out[24]:
array([[ 20.,   0.,   0.,   0.,   0.],
       [  0.,  20.,   0.,   0.,   0.],
       [  0.,   0.,  20.,   0.,   0.],
       [  0.,   0.,   0.,  20.,   0.],
       [  0.,   0.,   0.,   0.,  20.]])

In [23]:
np.size(gamma,0)


Out[23]:
5

In [ ]: