In [1]:
from numpy import *
from scipy import *

In [95]:
def activate_function(x):
    """Reservoir activation: elementwise hyperbolic tangent of ``x``."""
    squashed = tanh(x)
    return squashed
 
def sigmoid(x):
    """Elementwise logistic function 1 / (1 + e^(-x)).

    Uses the star-imported ``exp`` instead of ``numpy.exp``: the bare
    name ``numpy`` is never bound by this module's ``from numpy import *``
    and previously resolved only via leftover kernel state.
    """
    return 1. / (1 + exp(-x))

class RBM(object):
    """Restricted Boltzmann Machine trained with k-step contrastive divergence.

    Hidden units are binary: sampled from a binomial whose mean is the
    sigmoid of the bottom-up activation.  Visible units are reconstructed
    by sampling a narrow Gaussian (scale 0.03) around the sigmoid of the
    top-down activation, i.e. visibles are treated as roughly real-valued
    in [0, 1] (see sample_v_given_h).

    NOTE(review): the bare name `numpy` used throughout is not bound by
    this file's `from numpy import *` / `from scipy import *` -- it
    presumably leaked from earlier kernel state; confirm before a fresh
    Restart & Run All.
    """
    def __init__(self, input=None, n_visible=2, n_hidden=3, \
        W=None, hbias=None, vbias=None, numpy_rng=None):
        """Store sizes and initialize parameters.

        input     : optional (n_samples, n_visible) data matrix
        n_visible : number of visible units
        n_hidden  : number of hidden units
        W         : optional (n_visible, n_hidden) weight matrix
        hbias     : optional length-n_hidden hidden bias vector
        vbias     : optional length-n_visible visible bias vector
        numpy_rng : optional numpy RandomState; defaults to seed 1234
        """
        self.n_visible = n_visible  # num of units in visible (input) layer
        self.n_hidden = n_hidden    # num of units in hidden layer

        if numpy_rng is None:
            numpy_rng = numpy.random.RandomState(1234)  # fixed seed when none supplied


        if W is None:
            a = 1. / n_visible
            initial_W = numpy.array(numpy_rng.uniform(  # initialize W uniformly in [-1/n_visible, 1/n_visible]
                low=-a,
                high=a,
                size=(n_visible, n_hidden)))

            W = initial_W

        if hbias is None:
            hbias = numpy.zeros(n_hidden)  # initialize h bias 0

        if vbias is None:
            vbias = numpy.zeros(n_visible)  # initialize v bias 0


        self.numpy_rng = numpy_rng
        self.input = input
        self.W = W
        self.hbias = hbias
        self.vbias = vbias

        # self.params = [self.W, self.hbias, self.vbias]


    def contrastive_divergence(self, lr=0.1, k=1, input=None):
        """Run one CD-k update of W, hbias and vbias in place.

        lr    : learning rate
        k     : number of Gibbs steps for the negative phase
        input : optional new data batch; replaces self.input when given

        NOTE(review): the W update uses the *summed* outer products over
        the batch while the bias updates use per-unit means, and it uses
        ph_sample rather than ph_mean for the positive phase -- looks
        deliberate for this experiment but worth confirming against the
        standard CD gradient.
        """
        if input is not None:
            self.input = input
        
        ''' CD-k '''
        # Positive phase: hidden means/samples driven by the data.
        ph_mean, ph_sample = self.sample_h_given_v(self.input)

        chain_start = ph_sample

        # Negative phase: k steps of alternating Gibbs sampling (h -> v -> h).
        for step in xrange(k):  # xrange: this notebook is Python 2
            if step == 0:
                nv_means, nv_samples,\
                nh_means, nh_samples = self.gibbs_hvh(chain_start)
            else:
                nv_means, nv_samples,\
                nh_means, nh_samples = self.gibbs_hvh(nh_samples)

        # chain_end = nv_samples


        self.W += lr * (numpy.dot(self.input.T, ph_sample)
                        - numpy.dot(nv_samples.T, nh_means))
        self.vbias += lr * numpy.mean(self.input - nv_samples, axis=0)
        self.hbias += lr * numpy.mean(ph_sample - nh_means, axis=0)

        # cost = self.get_reconstruction_cross_entropy()
        # return cost


    def sample_h_given_v(self, v0_sample):
        """Return [mean, sample] of hidden units given visible values."""
        h1_mean = self.propup(v0_sample)
        h1_sample = self.numpy_rng.binomial(size=h1_mean.shape,   # discrete: binomial
                                       n=1,
                                       p=h1_mean)

        return [h1_mean, h1_sample]


    def sample_v_given_h(self, h0_sample):
        """Return [mean, sample] of visible units given hidden values.

        Sampling is Gaussian around the sigmoid mean (scale 0.03) rather
        than binomial -- binomial and deterministic variants are kept
        below, commented out.
        """
        v1_mean = self.propdown(h0_sample)
#        v1_sample = self.numpy_rng.binomial(size=v1_mean.shape,   # discrete: binomial
#                                            n=1,
#                                            p=v1_mean)
        v1_sample = self.numpy_rng.normal(size=v1_mean.shape, loc=v1_mean, scale=0.03)
#        v1_sample = v1_mean
        
        return [v1_mean, v1_sample]

    def propup(self, v):
        """Mean activation of the hidden units: sigmoid(v . W + hbias)."""
        pre_sigmoid_activation = numpy.dot(v, self.W) + self.hbias
        return sigmoid(pre_sigmoid_activation)

    def propdown(self, h):
        """Mean activation of the visible units: sigmoid(h . W^T + vbias)."""
        pre_sigmoid_activation = numpy.dot(h, self.W.T) + self.vbias
        return sigmoid(pre_sigmoid_activation)


    def gibbs_hvh(self, h0_sample):
        """One Gibbs step starting from a hidden sample: h -> v -> h."""
        v1_mean, v1_sample = self.sample_v_given_h(h0_sample)
        h1_mean, h1_sample = self.sample_h_given_v(v1_sample)

        return [v1_mean, v1_sample,
                h1_mean, h1_sample]
    

    def get_reconstruction_cross_entropy(self):
        """Cross-entropy between self.input and its deterministic
        reconstruction (propup then propdown, no sampling)."""
        pre_sigmoid_activation_h = numpy.dot(self.input, self.W) + self.hbias
        sigmoid_activation_h = (sigmoid(pre_sigmoid_activation_h))
        
        pre_sigmoid_activation_v = numpy.dot(sigmoid_activation_h, self.W.T) + self.vbias
        sigmoid_activation_v = (sigmoid(pre_sigmoid_activation_v))

        cross_entropy =  - numpy.mean(
            numpy.sum(self.input * numpy.log(sigmoid_activation_v) +
            (1 - self.input) * numpy.log(1 - sigmoid_activation_v)))
        
        return cross_entropy

    def reconstruct(self, v):
        """Deterministic reconstruction of visible vector(s) v."""
        h = sigmoid(numpy.dot(v, self.W) + self.hbias)
        reconstructed_v = sigmoid(numpy.dot(h, self.W.T) + self.vbias)
        return reconstructed_v

In [3]:
def test_rbm(learning_rate=0.1, k=1, training_epochs=1000):
    """Smoke-test the RBM on a tiny 6-feature binary dataset.

    Trains for `training_epochs` CD-k steps, printing the reconstruction
    cross-entropy each epoch, then prints the reconstruction of two
    held-out binary patterns.

    NOTE(review): Python 2 only (`xrange`, print statements) and uses
    `sys` without a visible import -- both rely on the original kernel
    environment.
    """
    data = numpy.array([[1,1,1,0,0,0],
                        [1,0,1,0,0,0],
                        [1,1,1,0,0,0],
                        [0,0,1,1,1,0],
                        [0,0,1,1,0,0],
                        [0,0,1,1,1,0]])



    rng = numpy.random.RandomState(123)

    # construct RBM
    rbm = RBM(input=data, n_visible=6, n_hidden=2, numpy_rng=rng)

    # train
    for epoch in xrange(training_epochs):
        rbm.contrastive_divergence(lr=learning_rate, k=k)
        cost = rbm.get_reconstruction_cross_entropy()
        print >> sys.stderr, 'Training epoch %d, cost is ' % epoch, cost


    # test: reconstruct two patterns not in the training data
    v = numpy.array([[0, 0, 0, 1, 1, 0],
                     [1, 1, 0, 0, 0, 0]])

    print rbm.reconstruct(v)

In [4]:
# Experiment constants for the driven-network run below.
T0 = 10000     # first segment of the run; T0 + T1 total steps are generated
T1 = 30000     # second segment of the run

N = 50         # number of network units (also RBM visible/hidden size)
alpha = 0.1    # scale of the external input term alpha * a[i] * U
maxSVD = 0.95  # target largest singular value of the weight matrix V

In [5]:
# generate weight matrix
# V: sparse N x N recurrent weights -- each entry is uniform(-1, 1) with
# probability 0.1, else 0.
V = zeros((N, N), dtype=float)
for i in range(N):
    for j in range(N):
        V[i,j] = random.uniform(-1, 1) if random.uniform(0, 1)<0.1 else 0
u,s,v = linalg.svd(V)
print s  # Python 2 print: singular values before rescaling
# Rescale every entry so the largest singular value becomes maxSVD.
for i in range(N):
    for j in range(N):
        V[i,j] *= maxSVD/s[0]
u,s,v = linalg.svd(V)
print s  # after rescaling the top singular value equals maxSVD (0.95)

# U: input weight vector, entries uniform in (-1, 1).
U = zeros(N, dtype=float)
for i in range(N):
    U[i] = random.uniform(-1, 1)


[  2.52240026e+00   2.46233565e+00   2.25604533e+00   2.25026490e+00
   2.18483351e+00   2.11039785e+00   2.02552943e+00   1.93935739e+00
   1.88559478e+00   1.77287217e+00   1.72651502e+00   1.64958585e+00
   1.61975458e+00   1.57116660e+00   1.49954661e+00   1.37109393e+00
   1.33745598e+00   1.26449346e+00   1.20291183e+00   1.15822807e+00
   1.09078789e+00   1.04476992e+00   1.01742589e+00   9.40595410e-01
   8.93234262e-01   8.88531770e-01   8.02528064e-01   7.65918563e-01
   7.38382379e-01   6.67053882e-01   6.34609594e-01   6.09646418e-01
   5.46933082e-01   5.05660677e-01   4.66581356e-01   4.40617627e-01
   4.01439046e-01   3.47542274e-01   3.28751981e-01   2.65653302e-01
   2.36207315e-01   2.03525177e-01   1.65744921e-01   1.50536693e-01
   1.36533545e-01   6.92303094e-02   4.34668564e-02   1.62731637e-02
   8.96566828e-03   9.04998520e-16]
[  9.50000000e-01   9.27378145e-01   8.49683971e-01   8.47506913e-01
   8.22863791e-01   7.94829429e-01   7.62865829e-01   7.30411248e-01
   7.10162885e-01   6.67708688e-01   6.50249405e-01   6.21275928e-01
   6.10040711e-01   5.91741247e-01   5.64767339e-01   5.16388796e-01
   5.03719889e-01   4.76240353e-01   4.53047146e-01   4.36218109e-01
   4.10818423e-01   3.93486887e-01   3.83188432e-01   3.54252120e-01
   3.36414709e-01   3.34643631e-01   3.02252451e-01   2.88464384e-01
   2.78093557e-01   2.51229434e-01   2.39010091e-01   2.29608325e-01
   2.05988889e-01   1.90444654e-01   1.75726389e-01   1.65947789e-01
   1.51192141e-01   1.30893247e-01   1.23816346e-01   1.00051780e-01
   8.89616739e-02   7.66527507e-02   6.24237470e-02   5.66959419e-02
   5.14220005e-02   2.60738928e-02   1.63707221e-02   6.12888676e-03
   3.37669838e-03   3.40845427e-16]

In [96]:
# generate output sequence
# Pass 1 drives an N-unit tanh network with the external input a (read
# from input.txt) while an RBM is trained online on the network state.
# Pass 2 reruns the dynamics from a fresh random state with the trained
# RBM and records the visible states (xxv) and hidden means (xxh).
rbm = RBM(n_visible=N, n_hidden=N)

vfunc = vectorize(activate_function, otypes=[float])  # elementwise tanh
x = zeros(N, dtype=float)
xxv = zeros((T0+T1, N), dtype=float)  # recorded network states
xxh = zeros((T0+T1, N), dtype=float)  # recorded hidden means
a = loadtxt("input.txt")  # external input sequence; assumes >= T0+T1 entries -- TODO confirm
for i in range(N):
    x[i] = random.uniform(-1, 1)  # random initial state in (-1, 1)
for i in range(T0+T1):
    # x lives in (-1, 1); (x+1)/2 maps it into (0, 1) for the RBM.
    hm, hs = rbm.sample_h_given_v((x+1)/2)
    # Recurrent drive plus a small feedback term from the RBM hidden sample.
    x = dot(V, x) + 0.01*(dot(hs, rbm.W.T) + rbm.vbias)
    x += alpha*a[i]*U
    x = vfunc(x)
#    xx[i,:] = x[:]
    rbm.contrastive_divergence(lr=0.05, input=(x+1)/2)  # online training step
#    cost = rbm.get_reconstruction_cross_entropy()
#    if i%100==0:
#        print cost
# Pass 2: fresh random initial state, RBM now fixed (no training calls).
for i in range(N):
    x[i] = random.uniform(-1, 1)
for i in range(T0+T1):
    hm, hs = rbm.sample_h_given_v((x+1)/2)
    x = dot(V, x) + 0.01*(dot(hs, rbm.W.T) + rbm.vbias)
    x += alpha*a[i]*U
    x = vfunc(x)
    xxv[i,:] = x
    xxh[i,:] = hm[:]

In [97]:
# Peek at ten consecutive rows of recorded hidden means
# (last expression of the cell is displayed).
xxh[1000:1010]


Out[97]:
array([[  3.26330198e-12,   3.12884364e-12,   3.03863055e-12,
          2.96258065e-12,   3.07979320e-12,   3.17844248e-12,
          3.15286179e-12,   3.33449329e-12,   3.06731800e-12,
          2.85564964e-12,   3.01699613e-12,   3.10102218e-12,
          3.07752148e-12,   3.24309284e-12,   3.05924331e-12,
          2.81305056e-12,   3.04033674e-12,   3.13640030e-12,
          3.24310305e-12,   3.01335994e-12,   3.08683053e-12,
          3.20879585e-12,   2.94582656e-12,   3.01934442e-12,
          3.23104858e-12,   3.16547519e-12,   2.92932477e-12,
          3.13952919e-12,   3.07569884e-12,   3.13399750e-12,
          2.95835216e-12,   3.01286695e-12,   3.07296177e-12,
          3.15392589e-12,   3.06717310e-12,   2.78446253e-12,
          3.07450116e-12,   2.80853675e-12,   3.26964005e-12,
          3.05939904e-12,   2.83848183e-12,   2.88487891e-12,
          2.92794599e-12,   3.24088174e-12,   2.91197616e-12,
          3.03590836e-12,   3.02301608e-12,   3.00118049e-12,
          3.03508284e-12,   2.93656581e-12],
       [  2.93296958e-12,   2.81104755e-12,   2.74167558e-12,
          2.65696868e-12,   2.76837583e-12,   2.86568831e-12,
          2.84007984e-12,   2.97693145e-12,   2.74468344e-12,
          2.56005224e-12,   2.71047275e-12,   2.78662858e-12,
          2.76804221e-12,   2.91100288e-12,   2.74510492e-12,
          2.52659287e-12,   2.73582987e-12,   2.82015373e-12,
          2.90456727e-12,   2.70114741e-12,   2.76969622e-12,
          2.89021519e-12,   2.65336971e-12,   2.71478514e-12,
          2.90434781e-12,   2.84529585e-12,   2.62444720e-12,
          2.82460517e-12,   2.76507613e-12,   2.81634121e-12,
          2.65399558e-12,   2.70349257e-12,   2.75446783e-12,
          2.83402802e-12,   2.75848058e-12,   2.50323962e-12,
          2.76138646e-12,   2.51431939e-12,   2.93633892e-12,
          2.75184160e-12,   2.54248088e-12,   2.60132619e-12,
          2.63693533e-12,   2.91480827e-12,   2.61204152e-12,
          2.72374826e-12,   2.71878573e-12,   2.69727044e-12,
          2.73217656e-12,   2.64235330e-12],
       [  3.33504895e-12,   3.20231218e-12,   3.11944905e-12,
          3.02394482e-12,   3.15205081e-12,   3.25542132e-12,
          3.22943894e-12,   3.38434921e-12,   3.12359766e-12,
          2.91028432e-12,   3.08326723e-12,   3.16796389e-12,
          3.14694702e-12,   3.31103630e-12,   3.12584406e-12,
          2.87972917e-12,   3.11154423e-12,   3.20963507e-12,
          3.30339532e-12,   3.07665871e-12,   3.14625106e-12,
          3.28228846e-12,   3.02062729e-12,   3.08919014e-12,
          3.30425283e-12,   3.23755177e-12,   2.98574882e-12,
          3.21164145e-12,   3.14401892e-12,   3.20236642e-12,
          3.01489597e-12,   3.07504098e-12,   3.13245004e-12,
          3.22326596e-12,   3.13764881e-12,   2.84776435e-12,
          3.14146546e-12,   2.86384343e-12,   3.34195246e-12,
          3.12454506e-12,   2.89420949e-12,   2.95848086e-12,
          2.99918043e-12,   3.31662975e-12,   2.97344331e-12,
          3.09740975e-12,   3.09231116e-12,   3.06975872e-12,
          3.10947312e-12,   3.00560470e-12],
       [  3.50353036e-12,   3.36554953e-12,   3.26971784e-12,
          3.18220198e-12,   3.31109094e-12,   3.41461846e-12,
          3.38878316e-12,   3.57366801e-12,   3.29317567e-12,
          3.06435089e-12,   3.24063507e-12,   3.32952736e-12,
          3.30497802e-12,   3.48213240e-12,   3.28697901e-12,
          3.02624889e-12,   3.26730287e-12,   3.37231497e-12,
          3.47996916e-12,   3.23815144e-12,   3.31091451e-12,
          3.44614555e-12,   3.17031889e-12,   3.24517840e-12,
          3.47149407e-12,   3.40080529e-12,   3.14461548e-12,
          3.37186638e-12,   3.30473315e-12,   3.36639938e-12,
          3.17306463e-12,   3.23606584e-12,   3.29914814e-12,
          3.38732867e-12,   3.29579779e-12,   2.99252469e-12,
          3.30340013e-12,   3.01620726e-12,   3.51525639e-12,
          3.28197060e-12,   3.04835541e-12,   3.10328741e-12,
          3.14880456e-12,   3.48414770e-12,   3.12933179e-12,
          3.25846703e-12,   3.24886128e-12,   3.22690464e-12,
          3.26305730e-12,   3.15587470e-12],
       [  2.55802479e-12,   2.44910114e-12,   2.39829673e-12,
          2.30821232e-12,   2.41360988e-12,   2.50623867e-12,
          2.48112566e-12,   2.56998420e-12,   2.37737112e-12,
          2.22335171e-12,   2.36274776e-12,   2.42841887e-12,
          2.41309402e-12,   2.53722356e-12,   2.39433551e-12,
          2.20413572e-12,   2.38583717e-12,   2.45509871e-12,
          2.51966933e-12,   2.34888118e-12,   2.40397935e-12,
          2.52213617e-12,   2.31839801e-12,   2.36383209e-12,
          2.53226332e-12,   2.48182300e-12,   2.28120618e-12,
          2.46981020e-12,   2.40546839e-12,   2.45228881e-12,
          2.30056988e-12,   2.34481248e-12,   2.38658542e-12,
          2.47107083e-12,   2.40681854e-12,   2.17944750e-12,
          2.40235365e-12,   2.18762953e-12,   2.55367511e-12,
          2.40074531e-12,   2.20541445e-12,   2.27353099e-12,
          2.29627699e-12,   2.54212708e-12,   2.26848313e-12,
          2.37362753e-12,   2.36896089e-12,   2.34813459e-12,
          2.38869696e-12,   2.30759493e-12],
       [  4.09839763e-12,   3.94118951e-12,   3.83071989e-12,
          3.71823421e-12,   3.87862897e-12,   3.99067150e-12,
          3.96443855e-12,   4.15350182e-12,   3.83731659e-12,
          3.57233684e-12,   3.78881706e-12,   3.88742100e-12,
          3.86407487e-12,   4.07082492e-12,   3.84924858e-12,
          3.54873335e-12,   3.81948323e-12,   3.94183286e-12,
          4.05888930e-12,   3.78836936e-12,   3.85756658e-12,
          4.01854776e-12,   3.71054080e-12,   3.79654182e-12,
          4.06142602e-12,   3.98197077e-12,   3.66814711e-12,
          3.94634567e-12,   3.85365881e-12,   3.93022149e-12,
          3.69762893e-12,   3.77442904e-12,   3.84461771e-12,
          3.96096398e-12,   3.85379305e-12,   3.49814163e-12,
          3.85722151e-12,   3.52857269e-12,   4.10334364e-12,
          3.82998028e-12,   3.56036054e-12,   3.62766863e-12,
          3.67676334e-12,   4.07365406e-12,   3.65259356e-12,
          3.80492624e-12,   3.79476555e-12,   3.76829897e-12,
          3.82407884e-12,   3.69182192e-12],
       [  3.29301490e-12,   3.15807135e-12,   3.07037569e-12,
          2.98544192e-12,   3.10863929e-12,   3.20871325e-12,
          3.18312796e-12,   3.34919284e-12,   3.08633813e-12,
          2.87519927e-12,   3.04432006e-12,   3.12844299e-12,
          3.10396162e-12,   3.27164053e-12,   3.09076933e-12,
          2.84278207e-12,   3.06770679e-12,   3.16287456e-12,
          3.26396154e-12,   3.03888052e-12,   3.10577459e-12,
          3.23560946e-12,   2.97562964e-12,   3.04502601e-12,
          3.26005463e-12,   3.19566198e-12,   2.95285571e-12,
          3.17185166e-12,   3.09958253e-12,   3.15980006e-12,
          2.97365300e-12,   3.03078042e-12,   3.09043416e-12,
          3.18277307e-12,   3.09608547e-12,   2.80716035e-12,
          3.09854383e-12,   2.83465271e-12,   3.29604636e-12,
          3.08594721e-12,   2.85802885e-12,   2.91285686e-12,
          2.95160591e-12,   3.27067756e-12,   2.93431371e-12,
          3.06447268e-12,   3.04861559e-12,   3.02752323e-12,
          3.06728844e-12,   2.96551089e-12],
       [  3.41393782e-12,   3.27551319e-12,   3.18325370e-12,
          3.09843113e-12,   3.22467056e-12,   3.32637177e-12,
          3.30056261e-12,   3.48013415e-12,   3.20522579e-12,
          2.98457732e-12,   3.15586151e-12,   3.24238715e-12,
          3.22027467e-12,   3.39204001e-12,   3.20013289e-12,
          2.94515306e-12,   3.18157847e-12,   3.28254320e-12,
          3.38974473e-12,   3.15253961e-12,   3.22626234e-12,
          3.35587725e-12,   3.08480307e-12,   3.16020264e-12,
          3.38123312e-12,   3.31287100e-12,   3.06100204e-12,
          3.28511090e-12,   3.21641721e-12,   3.27774257e-12,
          3.09283160e-12,   3.15161774e-12,   3.21250979e-12,
          3.29918586e-12,   3.20922530e-12,   2.91407766e-12,
          3.21625848e-12,   2.93643433e-12,   3.42000312e-12,
          3.19828580e-12,   2.96856842e-12,   3.02027284e-12,
          3.06442987e-12,   3.39171601e-12,   3.04516591e-12,
          3.17254392e-12,   3.16298317e-12,   3.13945962e-12,
          3.17780626e-12,   3.07301556e-12],
       [  2.57929945e-12,   2.46870791e-12,   2.41822065e-12,
          2.32696367e-12,   2.43309616e-12,   2.52636061e-12,
          2.50123773e-12,   2.59022398e-12,   2.39565881e-12,
          2.24056819e-12,   2.38178185e-12,   2.44849035e-12,
          2.43307411e-12,   2.55660886e-12,   2.41353253e-12,
          2.22190000e-12,   2.40561942e-12,   2.47535571e-12,
          2.53916614e-12,   2.36719552e-12,   2.42351696e-12,
          2.54223442e-12,   2.33657600e-12,   2.38370000e-12,
          2.55271222e-12,   2.50235382e-12,   2.29897965e-12,
          2.48948760e-12,   2.42536487e-12,   2.47195641e-12,
          2.31930850e-12,   2.36323165e-12,   2.40539143e-12,
          2.49110830e-12,   2.42621343e-12,   2.19695317e-12,
          2.42130653e-12,   2.20462270e-12,   2.57375983e-12,
          2.42072112e-12,   2.22267568e-12,   2.29177637e-12,
          2.31545139e-12,   2.56224023e-12,   2.28692147e-12,
          2.39296297e-12,   2.38802024e-12,   2.36730081e-12,
          2.40831249e-12,   2.32627450e-12],
       [  3.47917140e-12,   3.33995630e-12,   3.26131027e-12,
          3.14238837e-12,   3.29045953e-12,   3.39675105e-12,
          3.37038947e-12,   3.48905907e-12,   3.23300339e-12,
          3.01808778e-12,   3.21321009e-12,   3.29725704e-12,
          3.27934366e-12,   3.45035374e-12,   3.26421370e-12,
          3.01026623e-12,   3.24240927e-12,   3.34008644e-12,
          3.42531901e-12,   3.20361545e-12,   3.25963649e-12,
          3.41329070e-12,   3.15370662e-12,   3.21752477e-12,
          3.44561323e-12,   3.37973855e-12,   3.10066740e-12,
          3.35679395e-12,   3.26386029e-12,   3.32949799e-12,
          3.12086757e-12,   3.18675398e-12,   3.24138458e-12,
          3.35975033e-12,   3.27144734e-12,   2.96339273e-12,
          3.26564463e-12,   2.98404378e-12,   3.47098047e-12,
          3.25248673e-12,   3.00388469e-12,   3.08590989e-12,
          3.11767556e-12,   3.45654555e-12,   3.08682349e-12,
          3.22487676e-12,   3.21826076e-12,   3.19189823e-12,
          3.25308830e-12,   3.13685663e-12]])

In [53]:
# Persist both recorded trajectories as plain text.
savetxt("output_rbm_v.txt", xxv)  # network (visible) states
savetxt("output_rbm_h.txt", xxh)  # hidden means

In [41]:
# Sanity check: elementwise Gaussian sampling around a vector of means.
random.normal(loc=array([1,2,3,4,5]), scale=0.01)


Out[41]:
array([ 1.00032585,  1.99398369,  2.99338446,  3.99613071,  5.00605741])

In [98]:
# Inspect the singular-value spectrum of the trained RBM weight matrix.
linalg.svd(rbm.W)


Out[98]:
(array([[-0.14128538, -0.06712003,  0.10485693, ..., -0.00937246,
         0.04665777, -0.06273721],
       [-0.14133038,  0.06752818, -0.24800207, ...,  0.00722204,
         0.2112421 , -0.06700553],
       [-0.14147547,  0.25230142,  0.01731473, ..., -0.03184761,
         0.01600537,  0.06548917],
       ..., 
       [-0.14117788,  0.14214572,  0.11313055, ...,  0.1243244 ,
         0.11608697,  0.27389543],
       [-0.14172405,  0.01287219, -0.21028806, ...,  0.14285088,
        -0.31876251,  0.10365011],
       [-0.14181376, -0.15219278, -0.07007876, ..., -0.22697208,
        -0.23012045,  0.19444713]]),
 array([  5.30153291e+01,   1.57300926e-01,   1.48547699e-01,
         1.43894268e-01,   1.38502991e-01,   1.34193771e-01,
         1.32496663e-01,   1.24028637e-01,   1.20666589e-01,
         1.17515413e-01,   1.15342023e-01,   1.11975796e-01,
         1.03097799e-01,   1.00465588e-01,   9.75922692e-02,
         9.51993565e-02,   9.15110544e-02,   9.05465007e-02,
         8.87508448e-02,   8.43688435e-02,   8.39877385e-02,
         7.83855288e-02,   7.56985069e-02,   7.10489146e-02,
         6.90458436e-02,   6.69901995e-02,   6.57043597e-02,
         6.34489496e-02,   5.97460156e-02,   5.49588242e-02,
         5.37623026e-02,   4.91060899e-02,   4.72817458e-02,
         4.37769519e-02,   4.28863996e-02,   4.15552707e-02,
         3.94132506e-02,   3.66891691e-02,   3.52957679e-02,
         2.63133503e-02,   2.36562674e-02,   2.20306930e-02,
         1.97792193e-02,   1.92084122e-02,   1.72914140e-02,
         1.38469942e-02,   9.03810343e-03,   5.36396427e-03,
         4.35992838e-03,   1.93722439e-03]),
 array([[ 0.14106112,  0.14128394,  0.14142418, ...,  0.14151447,
         0.14143501,  0.141619  ],
       [-0.0835001 ,  0.06746195,  0.12969756, ...,  0.10806748,
         0.00940626, -0.08055431],
       [ 0.15681673,  0.11738235,  0.05343403, ...,  0.03378752,
         0.2122755 , -0.09918945],
       ..., 
       [-0.04900778, -0.27861216, -0.15596107, ...,  0.07954928,
         0.02508792,  0.08879695],
       [-0.21695559,  0.03400174,  0.02941616, ..., -0.18761359,
         0.06406925,  0.07357561],
       [-0.07942403, -0.0169131 , -0.13103429, ..., -0.09503029,
         0.15520118, -0.01386502]]))

In [100]:
# Sanity check: matrix-vector product with plain Python lists.
dot([[3,4],[5,6]], [1,2])


Out[100]:
array([11, 17])

In [ ]: