In [121]:
from numpy import *
from scipy import *
from sklearn import linear_model           # used below for LinearRegression
from matplotlib.pyplot import *            # pylab-style plotting (figure, plot, xlim, ...)
In [129]:
T0 = 5000
T1 = 5000
In [130]:
# Reservoir / simulation parameters (presumably the ones used to generate the data files loaded below).
D = 2                   # input dimension (the input file has D = 2 channels)
N = 30                  # presumably the number of nodes per layer
L = 2                   # number of layers
alpha = 0.01
b0 = 0.01
corr = 0.0
maxSVD_recur = 0.95     # presumably the largest singular value of the recurrent weights
maxSVD_inter = 0.5      # presumably the largest singular value of the inter-layer weights
taus = [0.01, 0.7]      # presumably the time constants of the two layers
In [131]:
def legendre(n, x):
    # Legendre polynomial P_n(x), scaled by (2n+1)/2 for n >= 1; only n = 0..3 are used below.
    if n == 0:
        return 1
    elif n == 1:
        return x * (2*n+1)/2
    elif n == 2:
        return (1.5*x*x - 0.5) * (2*n+1)/2
    elif n == 3:
        return (2.5*x*x*x - 1.5*x) * (2*n+1)/2
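A quick sanity check (a sketch using numpy.polynomial): for n >= 1 the function returns (2n+1)/2 times the standard Legendre polynomial P_n. The overall scale is irrelevant for the capacities computed below, because calc_cap normalizes by the target's power.
In [ ]:
from numpy.polynomial.legendre import legval

x = linspace(-1, 1, 5)
for n in range(1, 4):
    ref = (2*n + 1) / 2.0 * legval(x, [0]*n + [1])    # (2n+1)/2 * P_n(x)
    print(n, allclose([legendre(n, xi) for xi in x], ref))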
In [322]:
a = loadtxt("input_D2_circ.txt")
#b2 = loadtxt("output_lin.txt")
#b1 = loadtxt("output_D2_circ_0.txt")
#b2 = loadtxt("output_D2_circ_1.txt")
b1 = loadtxt("output_D2_circ_0.txt")
b2 = loadtxt("output_D2_circ_1.txt")
b3 = loadtxt("output_D2_circ_2.txt")
b4 = loadtxt("output_D2_circ_3.txt")
print(b1.shape)
print(b2.shape)
In [36]:
figure()
clf()
plot(b2[1000:1400,0])
plot(b2[1000:1400,1])
plot(b2[1000:1400,2])
plot(b2[1000:1400,3])
#ylim(-1.0, 1.0)
Out[36]:
In [140]:
P = 40    # maximum delay considered
# Precompute the (scaled) Legendre polynomials of the delayed input:
# ls[t, i, j] = legendre(j, .) applied to input channel i at time T0 + t - P.
ls = zeros((P+T1, D, 4), dtype=float)
for t in range(P+T1):
    for i in range(D):
        for j in range(4):
            ls[t, i, j] = legendre(j, a[T0+t-P, i])
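The same table can also be built without the triple loop; a vectorized sketch (ls_vec is a hypothetical name, equal to ls under the definitions above):
In [ ]:
# Evaluate each polynomial order on the whole input slice a[T0-P : T0+T1, :] at once.
seg = a[T0-P:T0+T1, :]
ls_vec = stack([legendre(j, seg) if j > 0 else ones_like(seg)
                for j in range(4)], axis=-1)
print(allclose(ls_vec, ls))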
In [141]:
def calc_cap(ds, plotflag):
    # Capacity of the reservoir for a single target basis function.
    # ds is a list of (delay, input channel, polynomial degree) tuples; the target
    # y(t) is the product of the corresponding Legendre polynomials of the delayed input.
    y = zeros(T1, dtype=float)
    for i in range(T1):
        tmp = 1.0
        for j in ds:
            tmp *= ls[i-j[0]+P, j[1], j[2]]
        y[i] = tmp
    # Linear readout from the concatenated b1 and b2 state traces.
    lin = linear_model.LinearRegression()
    lin.fit(hstack([b1[T0:T0+T1,:], b2[T0:T0+T1,:]]), y[:T1])
    z = lin.predict(hstack([b1[T0:,:], b2[T0:,:]]))   # prediction over all available steps; the error below uses z[:T1]
    if plotflag:
        figure()
        plot(y)
        plot(z)
    # Capacity = 1 - MSE / <y^2> on the fitted window.
    mse = 0
    yy = 0
    for i in range(T1):
        mse += (y[i] - z[i])**2
        yy += y[i]**2
    return 1.0 - mse/yy
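A usage sketch (hypothetical targets, following the (delay, channel, degree) convention of ds): the linear memory of channel 0 at delay 5, and a degree-2 cross term between the two channels.
In [ ]:
c_lin = calc_cap([(5, 0, 1)], False)               # P_1 of channel 0, 5 steps back
c_cross = calc_cap([(2, 0, 1), (7, 1, 1)], False)  # product of channel 0 at delay 2 and channel 1 at delay 7
print(c_lin, c_cross)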
In [142]:
# Enumerate target basis functions of total degree 1, 2 and 3 and record the
# capacity of each in memo; memos keeps a printable description of every target.
from itertools import product

memo = zeros(10000, dtype=float)
memos = []
cnt = 0

# Degree 1: a single P_1 of one input channel at one delay (delays 0..39).
for i0, i1 in product(range(40), range(D)):
    ds = [(i0, i1, 1)]
    memo[cnt] = calc_cap(ds, False)
    memos.append(str(ds))
    if cnt % 20 == 0:
        print(cnt)
    cnt += 1

# Degree 2: unordered pairs of (delay, channel) factors, delays 0..11.
for i0, i1 in product(range(12), range(D)):
    for j0, j1 in product(range(12), range(D)):
        if j0*D+j1 >= i0*D+i1:          # count each unordered pair once
            ds = []
            if (i0, i1) == (j0, j1):
                ds.append((i0, i1, 2))
            else:
                ds.append((i0, i1, 1))
                ds.append((j0, j1, 1))
            memo[cnt] = calc_cap(ds, False)
            memos.append(str(ds))
            if cnt % 20 == 0:
                print(cnt)
            cnt += 1

# Degree 3: unordered triples of (delay, channel) factors, delays 0..7.
for i0, i1 in product(range(8), range(D)):
    for j0, j1 in product(range(8), range(D)):
        for k0, k1 in product(range(8), range(D)):
            if k0*D+k1 >= j0*D+j1 and j0*D+j1 >= i0*D+i1:   # count each unordered triple once
                ds = []
                if (i0, i1) == (j0, j1) == (k0, k1):
                    ds.append((i0, i1, 3))
                elif (i0, i1) == (j0, j1):
                    ds.append((i0, i1, 2))
                    ds.append((k0, k1, 1))
                elif (k0, k1) == (j0, j1):
                    ds.append((i0, i1, 1))
                    ds.append((k0, k1, 2))
                else:
                    # all three (delay, channel) pairs distinct: one degree-1 factor each
                    ds.append((i0, i1, 1))
                    ds.append((j0, j1, 1))
                    ds.append((k0, k1, 1))
                memo[cnt] = calc_cap(ds, False)
                memos.append(str(ds))
                if cnt % 20 == 0:
                    print(cnt)
                cnt += 1
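Since the degree-1 block fills the first 40*D = 80 slots of memo, the degree-2 block the next (12*D)(12*D+1)/2 = 300, and the degree-3 block the remaining 816, the per-degree capacity totals can be read off directly. A sketch, assuming the cell above has run to completion:
In [ ]:
n1 = 40 * D                        # 80 degree-1 targets
n2 = (12 * D) * (12 * D + 1) // 2  # 300 degree-2 targets
print("degree 1:", sum(memo[:n1]))
print("degree 2:", sum(memo[n1:n1+n2]))
print("degree 3:", sum(memo[n1+n2:cnt]))
print("total   :", sum(memo[:cnt]))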
In [115]:
sum(memo)
Out[115]:
In [56]:
print(calc_cap([(3,0,3)], True))
xlim(0, 500)
Out[56]:
In [52]:
xlim(0, 1000)
Out[52]:
In [143]:
figure(18)
plot(memo[:385])
Out[143]:
In [22]:
yscale("log")
In [42]:
memos = array(memos)
memos[:385][memo[:385] > 0.005]    # targets among the first 385 with capacity above 0.005
Out[42]:
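To see which targets carry the most capacity, the entries can also be sorted (a sketch, assuming the enumeration cell above has run):
In [ ]:
# Ten largest capacities and the corresponding target descriptions.
order = argsort(memo[:cnt])[::-1]
for idx in order[:10]:
    print(memos[idx], memo[idx])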
In [116]:
memos.shape
In [101]:
hstack([b1, b2]).shape
Out[101]:
In [145]:
a.shape
Out[145]:
In [327]:
# Train a linear readout that reconstructs the input 13 steps in the past from the b4 states.
lin = linear_model.LinearRegression()
lin.fit(b4[5000:20000,:], a[5000-13:20000-13])
Out[327]:
In [328]:
a_ = lin.predict(b4[5000:20000])
In [329]:
figure()
plot(a[5000-13:20000-13, 1])
plot(a_[:,1])
xlim(14000, 15000)
Out[329]:
In [331]:
# Mean squared reconstruction error per element (15000 time steps x 2 channels = 30000 terms).
s = 0
for i in range(5000, 20000):
    for j in range(2):
        s += (a[i-13, j] - a_[i-5000, j])**2
print(s/30000.0)
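The same number can be obtained in one vectorized step (equivalent to the loop above):
In [ ]:
# Vectorized mean squared reconstruction error over the 15000 x 2 window.
err = a[5000-13:20000-13, :] - a_
print(mean(err**2))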
In [ ]: