In [1]:
import theano
import numpy as np
import scipy as sp
from numpy import linalg as la, random as rnd
from theano import tensor
In [2]:
class WTT:
    def __init__(self, nd, ranks):
        self.shapes = self.compute_shapes(nd, ranks)

    def compute_shapes(self, nd, ranks):
        narr = np.array(nd, dtype='int')
        # prepend the boundary rank r0 = 1; core k is a square matrix
        # acting on r[k-1] * nd[k] rows
        nranks = np.array([1] + list(ranks), dtype='int')
        prods = narr * nranks[:len(narr)]
        shapes = [(prod, prod) for prod in prods]
        return shapes
def wtt_tensor_image(x, cores, n, ranks, rec_dep=0, mode="numeric", verbose=False):
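    # x     : symbolic 2 x N tensor; channel 0 = real part, channel 1 = imaginary part
    # cores : list of shared 2 x (r[k-1]*n[k]) x (r[k-1]*n[k]) filter matrices
    # n     : mode sizes of the underlying tensor
    # ranks : WTT ranks; r0 is fixed to 1 ("mode" is currently unused)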
rd = '\t'*rec_dep
if verbose:
        print('{}Inside rec step {}:'.format(rd, rec_dep))
print('{}x shape is {}'.format(rd, x.shape))
    # the boundary rank r0 (x.size / prod(n) for a full tensor) is always 1
    r0 = 1
k = rec_dep
    if k == len(n) - 1:
        # base case: apply the last core and unwind the recursion
        y = complex_dot(hconj(cores[-1]), x)
        if verbose:
            print('{}output of last iteration. Its shape = {}'.format(rd, y.shape))
            print('core {}: {}'.format(k, cores[k].get_value().shape))
        return y
rk = ranks[k]
rkm1 = r0 if k == 0 else ranks[k-1]
nk = n[k]
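    # unfold x into a 2 x (r[k-1]*n[k]) x (rest) tensor so core k can act on its rows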
xk = tensor.reshape(x, (2, rkm1 * nk, -1))
if verbose:
        print('{}after first reshape with r[k-1]={} and n[k]={}: xk.shape = {}'.format(rd, rkm1, nk, xk.shape))
print('core {}: {}'.format(k, cores[k].get_value().shape))
xk = complex_dot(hconj(cores[k]), xk)
if verbose:
print('{}after dot with conj transposed cores[k] xk.shape = {}'.format(rd, xk.shape))
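    # split the transformed rows: the first r[k] rows descend into the next
    # recursion level, the remaining rows pass through unchanged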
    xk1 = xk[:, :rk, :]
    if verbose:
        print('{}after taking the first r[k]={} rows of xk: xk1.shape = {}'.format(rd, rk, xk1.shape))
    zk1 = xk[:, rk:rkm1 * nk, :]
    if verbose:
        print('{}after taking the remaining rows of xk: zk1.shape = {}'.format(rd, zk1.shape))
xk1 = tensor.reshape(xk1, (2, -1))
    if verbose:
        print('{}after reshape xk1 to a 2 x N matrix: xk1.shape = {}'.format(rd, xk1.shape))
        print(rd + '-' * 40 + '\n')
        print('{}Leap into next recursive step'.format(rd))
    yk1 = wtt_tensor_image(xk1, cores, n, ranks, rec_dep=rec_dep + 1, mode=mode, verbose=verbose)
if verbose:
        print('{}Return from recursive step'.format(rd))
        print(rd + '-' * 40 + '\n')
        print('{}result of next rec step: yk1.shape = {}'.format(rd, yk1.shape))
    yk1 = tensor.reshape(yk1, (2, rk, -1))
    if verbose:
        print('{}after reshape yk1 to (2, r[k], -1): yk1.shape = {}'.format(rd, yk1.shape))
yk = tensor.concatenate([yk1, zk1], axis=1)
if verbose:
print('{}after concat yk1 and zk1: yk1 = {}, zk1 = {}, [yk1, zk1] = {}'.format(rd, yk1.shape, zk1.shape, yk.shape))
yk = tensor.reshape(yk, (2, -1))
if verbose:
        print('{}finally flatten yk back to a 2 x N matrix: yk.shape = {}'.format(rd, yk.shape))
return yk
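The function above relies on hconj and complex_dot, which are not defined in this section and presumably live in an earlier cell. A minimal sketch, assuming complex values are stored as real tensors with a leading axis of size 2 (channel 0 = real part, channel 1 = imaginary part):
In [3]:
def hconj(a):
    # Hermitian conjugate in the 2-channel representation:
    # transpose each channel and negate the imaginary one
    return tensor.stack([a[0].T, -a[1].T])

def complex_dot(a, b):
    # (ar + i*ai) @ (br + i*bi) = (ar@br - ai@bi) + i*(ar@bi + ai@br)
    re = tensor.dot(a[0], b[0]) - tensor.dot(a[1], b[1])
    im = tensor.dot(a[0], b[1]) + tensor.dot(a[1], b[0])
    return tensor.stack([re, im])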
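A hypothetical usage sketch for the shape bookkeeping in the WTT class; the mode sizes and ranks below are made-up examples:
In [4]:
n = [2, 2, 2]    # mode sizes of the underlying tensor
ranks = [2, 2]   # WTT ranks r[1], r[2]
print(WTT(n, ranks).shapes)   # [(2, 2), (4, 4), (4, 4)]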
In [24]:
s = "xxx"
In [25]:
s.startswith("x")
Out[25]:
In [ ]:
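An end-to-end sketch under the same assumptions: compile the symbolic transform with theano.function and run it on random data. The random cores are placeholders with the right shapes, not filters obtained from an actual WTT decomposition:
In [5]:
shapes = WTT(n, ranks).shapes
cores = [theano.shared(rnd.randn(2, *s).astype(theano.config.floatX))
         for s in shapes]
x = tensor.matrix('x')                  # 2 x N input: real/imag channels
y = wtt_tensor_image(x, cores, n, ranks)
wtt = theano.function([x], y)

data = rnd.randn(2, np.prod(n)).astype(theano.config.floatX)
print(wtt(data).shape)                  # (2, 8): same number of entries as the input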