In [32]:
import autoreg
import GPy
import numpy as np
from pylab import *
import tables
from IPython.display import HTML
%matplotlib inline

In [33]:
def gen_frames(data, data_mean, data_std, skel, imgpath):
    """Render each frame of a normalized motion sequence as a PNG in imgpath.

    data      : (T, 59) array of normalized joint-angle frames (root translation removed)
    data_mean : per-dimension mean used to de-normalize the frames
    data_std  : per-dimension std used to de-normalize the frames
    skel      : GPy acclaim skeleton object used for 3D visualization
    imgpath   : directory where frame images '00000.png', '00001.png', ... are written
    """
    import os
    import GPy
    # Full 62-channel vector; the first 3 entries (root translation) stay zero.
    a = np.zeros((62,))
    fig = figure(figsize=(8, 10))
    ax = fig.add_subplot(111, projection='3d', aspect='equal')
    ax.view_init(elev=20., azim=65)
    fig.tight_layout()
    # BUGFIX: de-normalize with the mean/std passed as arguments; the original
    # ignored data_mean/data_std and read the globals data_out_mean/data_out_std.
    a[3:] = data[0]*data_std + data_mean
    p = GPy.plotting.matplot_dep.visualize.skeleton_show(a, skel, axes=ax)
    for i in xrange(data.shape[0]):
        a[3:] = data[i]*data_std + data_mean
        p.modify(a)
        fig.savefig(os.path.join(imgpath, '%05d' % i + '.png'))

In [34]:
from GPy.util.datasets import *

def cmu_mocap_xyz(subject, train_motions, test_motions=[], sample_every=4, data_set='cmu_mocap'):
    """Load a given subject's training and test motions from the CMU motion capture data."""
    # Load in subject skeleton.
    subject_dir = os.path.join(data_path, data_set)

    # Make sure the data is downloaded.
    all_motions = train_motions + test_motions
    resource = cmu_urls_files(([subject], [all_motions]))
    data_resources[data_set] = data_resources['cmu_mocap_full'].copy()
    data_resources[data_set]['files'] = resource['files']
    data_resources[data_set]['urls'] = resource['urls']
    if resource['urls']:
        download_data(data_set)

    skel = GPy.util.mocap.acclaim_skeleton(os.path.join(subject_dir, subject + '.asf'))
    
    
    for i in range(len(skel.vertices)):
        print i, skel.vertices[i].name     

    
    # 0 root
    # 1 lhipjoint
    # 2 lfemur
    # 3 ltibia
    # 4 lfoot
    # 5 ltoes
    # 6 rhipjoint
    # 7 rfemur
    # 8 rtibia
    # 9 rfoot
    # 10 rtoes
    # 11 lowerback
    # 12 upperback
    # 13 thorax
    # 14 lowerneck
    # 15 upperneck
    # 16 head
    # 17 lclavicle
    # 18 lhumerus
    # 19 lradius
    # 20 lwrist
    # 21 lhand
    # 22 lfingers
    # 23 lthumb
    # 24 rclavicle
    # 25 rhumerus
    # 26 rradius
    # 27 rwrist
    # 28 rhand
    # 29 rfingers
    # 30 rthumb

    
    
    
    
    # Set up labels for each sequence
    exlbls = np.eye(len(train_motions))

    # Load sequences
    tot_length = 0
    temp_Y = []
    temp_Yxyz = []
    temp_lbls = []
    #u_inds = [15,16,17]
    #root_inds = [0,1,2]
    u_inds=[17]
    root_inds = [2]
    
    for i in range(len(train_motions)):
        temp_chan = skel.load_channels(os.path.join(subject_dir, subject + '_' + train_motions[i] + '.amc'))
        #temp_xyz_chan = skel.to_xyz(temp_chan.flatten()) ## A
        # Apparently the above is equiv. to giving temp_chan[0,:]. It's returning a 31 x 3 matrix.
        # I need to do this for every temp_chan[j,:], and serialize the result. The toe should be the
        # very last dimension (I think).
        #temp_xyz_chan = np.zeros((temp_chan.shape[0],93))
        #A -------
        temp_xyz_chan = np.zeros((temp_chan.shape[0],len(u_inds)))                        
        for j in range(temp_xyz_chan.shape[0]):                                 
            foo = skel.to_xyz(temp_chan[j,:]).flatten()
            temp_xyz_chan[j,:] = foo[u_inds] - foo[root_inds]
        #----A
        temp_Y.append(temp_chan[::sample_every, :])
        temp_Yxyz.append(temp_xyz_chan[::sample_every, :]) ## A
        temp_lbls.append(np.tile(exlbls[i, :], (temp_Y[i].shape[0], 1)))
        tot_length += temp_Y[i].shape[0]

    Y = np.zeros((tot_length, temp_Y[0].shape[1]))
    Yxyz = np.zeros((tot_length, temp_Yxyz[0].shape[1])) #A
    lbls = np.zeros((tot_length, temp_lbls[0].shape[1]))

    #pb.plot(temp_Yxyz[-1][:,15:18]-temp_Yxyz[-1][:,0:3],'x-')

    end_ind = 0
    for i in range(len(temp_Y)):
        start_ind = end_ind
        end_ind += temp_Y[i].shape[0]
        Y[start_ind:end_ind, :] = temp_Y[i]
        Yxyz[start_ind:end_ind, :] = temp_Yxyz[i] #A
        lbls[start_ind:end_ind, :] = temp_lbls[i]
    if len(test_motions) > 0:
        temp_Ytest = []
        temp_lblstest = []
        temp_Yxyz_test = []

        testexlbls = np.eye(len(test_motions))
        tot_test_length = 0
        for i in range(len(test_motions)):
            temp_chan = skel.load_channels(os.path.join(subject_dir, subject + '_' + test_motions[i] + '.amc'))
            #A -------
            temp_xyz_chan = np.zeros((temp_chan.shape[0],len(u_inds)))                        
            for j in range(temp_xyz_chan.shape[0]):                                 
                foo = skel.to_xyz(temp_chan[j,:]).flatten()
                temp_xyz_chan[j,:] = foo[u_inds] - foo[root_inds]
            #----A
            temp_Ytest.append(temp_chan[::sample_every, :])
            temp_Yxyz_test.append(temp_xyz_chan[::sample_every, :]) ## A
            temp_lblstest.append(np.tile(testexlbls[i, :], (temp_Ytest[i].shape[0], 1)))
            tot_test_length += temp_Ytest[i].shape[0]

        # Load test data
        Ytest = np.zeros((tot_test_length, temp_Ytest[0].shape[1]))
        Yxyz_test = np.zeros((tot_test_length, temp_Yxyz_test[0].shape[1])) #A
        lblstest = np.zeros((tot_test_length, temp_lblstest[0].shape[1]))

        end_ind = 0
        for i in range(len(temp_Ytest)):
            start_ind = end_ind
            end_ind += temp_Ytest[i].shape[0]
            Ytest[start_ind:end_ind, :] = temp_Ytest[i]
            Yxyz_test[start_ind:end_ind, :] = temp_Yxyz_test[i] #A
            lblstest[start_ind:end_ind, :] = temp_lblstest[i]
    else:
        Ytest = None
        lblstest = None

    info = 'Subject: ' + subject + '. Training motions: '
    for motion in train_motions:
        info += motion + ', '
    info = info[:-2]
    if len(test_motions) > 0:
        info += '. Test motions: '
        for motion in test_motions:
            info += motion + ', '
        info = info[:-2] + '.'
    else:
        info += '.'
    if sample_every != 1:
        info += ' Data is sub-sampled to every ' + str(sample_every) + ' frames.'
    return data_details_return({'Y': Y, 'lbls' : lbls, 'Ytest': Ytest, 'lblstest' : lblstest, 'info': info, 'skel': skel,'Yxyz':Yxyz,'Yxyz_test':Yxyz_test,'u_inds':u_inds,'root_inds':root_inds,'Yxyz_list':temp_Yxyz,'Yxyz_list_test':temp_Yxyz_test}, data_set)

In [35]:
def load_data():
    """Load subject 35's walking and running motions (angle channels only)
    via GPy's stock cmu_mocap loader."""
    from GPy.util.datasets import cmu_mocap
    walking = ['01', '02', '03', '04']
    running = ['17', '18', '19', '20']
    return cmu_mocap('35', walking + running, ['05', '21'],
                     sample_every=4, data_set='cmu_mocap')

def load_data_xyz():
    """Load subject 35's walking and running motions together with the xyz
    control signals computed by cmu_mocap_xyz."""
    walking = ['01', '02', '03', '04']
    running = ['17', '18', '19', '20']
    return cmu_mocap_xyz('35', walking + running, ['05', '21'],
                         sample_every=4, data_set='cmu_mocap')

In [36]:
# Load angle channels + xyz controls; also prints the skeleton vertex ordering.
#data = load_data()
data = load_data_xyz()


0 root
1 lhipjoint
2 lfemur
3 ltibia
4 lfoot
5 ltoes
6 rhipjoint
7 rfemur
8 rtibia
9 rfoot
10 rtoes
11 lowerback
12 upperback
13 thorax
14 lowerneck
15 upperneck
16 head
17 lclavicle
18 lhumerus
19 lradius
20 lwrist
21 lhand
22 lfingers
23 lthumb
24 rclavicle
25 rhumerus
26 rradius
27 rwrist
28 rhand
29 rfingers
30 rthumb

In [50]:
y = data['Y']          # raw angle channels, (575, 62) per the shapes printed below
u = data['Yxyz_list']  # per-sequence xyz control signals
u_flat = np.vstack(u)  # all control frames stacked, (575, 1)

lbls = data['lbls']    # one-hot sequence labels
data_out_train = y

#---- REMOVE
# Debug check: the xyz loader must return the same angle data as plain cmu_mocap.
datatmp = load_data()
if np.sum(np.abs(y - datatmp['Y'])) != 0:
    print np.sum(y - datatmp['Y'])
#----

# Drop the 3 root-translation channels, then z-score normalize the outputs.
data_out_train = data_out_train[:,3:]
data_out_mean  = data_out_train.mean(axis=0)
data_out_std   = data_out_train.std(axis=0)
data_out_train = (data_out_train-data_out_mean)/data_out_std
#data_out_train_list = [data_out_train[np.where(lbls[:,i]==1)[0]][1:] for i in range(lbls.shape[1])]
data_out_train_list = [data_out_train[np.where(lbls[:,i]==1)[0]] for i in range(lbls.shape[1])]


# Create controls
#data_in_train_list = [y[np.where(lbls[:,i]==1)[0]][:,2][1:] - y[np.where(lbls[:,i]==1)[0]][:,2][:-1] for i in range(lbls.shape[1])]
#from scipy.ndimage.filters import gaussian_filter1d
#data_in_train_list = [np.ones(d.shape+(1,))*d.mean() for d in data_in_train_list]

##data_in_train_list = [gaussian_filter1d(d,8.)[:,None] for d in data_in_train_list]
##data_in_train_list = [np.vstack([d[:10],d]) for d in data_in_train_list]


# NOTE(review): data_in_train_list keeps the UN-normalized per-sequence
# controls while data_in_train below is normalized — confirm which scale the
# model is meant to consume.
data_in_train_list = u
u_flat_mean = u_flat.mean(axis=0)
u_flat_std = u_flat.std(axis=0)
data_in_train = (u_flat-u_flat_mean)/u_flat_std

#data_in_train_list = u

In [45]:
# Per-sequence shapes of controls and outputs, plus one plot per control signal.
print data_in_train_list[0].shape
print data_out_train_list[0].shape

for i in range(len(data_in_train_list)):
    plt.figure()
    plt.plot(data_in_train_list[i], 'x-')
    plt.title(i)
    print data_in_train_list[i].shape[0]


(90, 1)
(90, 59)
90
102
107
109
42
44
40
41

In [57]:
# Sanity-check shapes: 62 raw channels, 59 after dropping root translation,
# and a single control dimension.
print y.shape
print data_out_train.shape
print u_flat.shape
print data_in_train.shape


(575, 62)
(575, 59)
(575, 1)
(575, 1)

In [52]:
ytest = data['Ytest']
lblstest = data['lblstest']
u = data['Yxyz_list_test']   # per-sequence xyz controls for the test motions
data_out_test = ytest

# Drop the 3 root-translation channels and normalize with the TRAINING stats.
data_out_test = data_out_test[:,3:]
data_out_test = (data_out_test-data_out_mean)/data_out_std

data_out_test_list = [data_out_test[np.where(lblstest[:,i]==1)[0]] for i in range(lblstest.shape[1])]

# Controls for the test sequences (un-normalized, per sequence).
data_in_test_list = u
# BUGFIX: normalize the *test* controls. This previously re-normalized the
# training stack `u_flat`, so data_in_test had the training data's length
# (575 rows) and content instead of the test controls.
data_in_test = (np.vstack(u)-u_flat_mean)/u_flat_std

In [10]:
len(data_in_test_list)  # expect 2 test sequences ('05' and '21')


Out[10]:
2

In [58]:
# Optionally rebuild the model with the architecture of a previously saved
# run (weights would then be loaded from the commented .h5 block below).
# Currently disabled.
init_from_saved = False

# Init from saved model
if init_from_saved:
    Q = 100

    win_in = 10
    win_out = 10

    # create the model
    m = autoreg.DeepAutoreg([0, win_out], data_out_train_list, U=data_in_train_list, U_win=win_in, X_variance=0.05,
                        num_inducing=Q, back_cstr=True, MLP_dims=[500,200], nDims=[data_out_train.shape[1],1],
                         kernels=[GPy.kern.RBF(win_out,ARD=True,inv_l=True, useGPU=False),
                         GPy.kern.RBF(win_out,ARD=True,inv_l=True, useGPU=False)])

#f = tables.openFile('./first_10.h5','r')
#ps = f.root.param_array[:]
#m.param_array[:] = ps
#f.close()
#m._trigger_params_changed()

In [107]:
Q = 100          # number of inducing points per layer

win_in = 20      # autoregressive window on the control input
win_out = 20     # autoregressive window on the latent/output layers

use_controls = True
back_cstr = False   # back-constraint (MLP recognition model) disabled



# create the model
if use_controls:
    m = autoreg.DeepAutoreg([0, win_out], data_out_train, U=data_in_train, U_win=win_in, X_variance=0.05,
                        num_inducing=Q, back_cstr=back_cstr, MLP_dims=[200,100], nDims=[data_out_train.shape[1],1],
                         kernels=[GPy.kern.RBF(win_out,ARD=True,inv_l=True, useGPU=False),
                         GPy.kern.RBF(win_out+win_in,ARD=True,inv_l=True, useGPU=False)], inducing_init='random')
else:
    # NOTE(review): this branch passes data_in_train (the 1-D controls) as the
    # observed outputs while nDims is sized from data_out_train — looks like it
    # should be data_out_train. Unused while use_controls=True; confirm before use.
    m = autoreg.DeepAutoreg([0, win_out], data_in_train, U=None, U_win=win_in, X_variance=0.05,
                        num_inducing=Q, back_cstr=back_cstr, MLP_dims=[200,100], nDims=[data_out_train.shape[1],1],
                         kernels=[GPy.kern.RBF(win_out,ARD=True,inv_l=True, useGPU=False),
                         GPy.kern.RBF(win_out,ARD=True,inv_l=True, useGPU=False)])

In [108]:
# Initialize the latent layer (only when the back-constraint is off):
# project the outputs onto their first principal component for the X means,
# then pick Q random latent points as inducing inputs.
# NOTE(review): np.random.permutation is unseeded — this init is not reproducible.
if not back_cstr:
    pp = GPy.util.pca.PCA(data_out_train)
    m.layer_1.Xs_flat[0].mean[:] = pp.project(data_out_train, 1)
    perm = np.random.permutation(range(m.layer_1.X.mean.shape[0]))
    m._trigger_params_changed()
    m.layer_1.Z[:] = m.layer_1.X.mean[perm[0:Q],:].values.copy()
    m._trigger_params_changed()


Out[108]:
[<matplotlib.lines.Line2D at 0xa0e7decc>]

In [110]:
# Overlay the normalized control signal and the initialized latent means.
plt.plot(data_in_train, 'x-')
plt.plot(m.layer_1.Xs_flat[0].mean[:], 'ro-')


Out[110]:
[<matplotlib.lines.Line2D at 0xa5892acc>]

In [ ]:
# initialization
# Set per-layer inverse-lengthscales and noise variances, then fix the kernel
# variances and likelihoods so the first optimization phase leaves them alone.
for i in range(m.nLayers):
    if not back_cstr:
        # Scale inverse-lengthscales to the spread of the latent means.
        m.layers[i].kern.inv_l[:]  = ((m.layers[i].X.mean.values.max(0)-m.layers[i].X.mean.values.min(0))/np.sqrt(2.))
    else:
        m.layers[i].kern.inv_l[:]  = 1./9.#((m.layers[i].X.mean.values.max(0)-m.layers[i].X.mean.values.min(0))/np.sqrt(2.))
    m.layers[i].likelihood.variance[:] = 0.01*data_out_train.var()
    m.layers[i].kern.variance.fix(warning=False)
    m.layers[i].likelihood.fix(warning=False)

# Tighter noise on the top (latent) layer.
if not back_cstr:
    m.layer_1.likelihood.variance[:] = 0.01
else:
    m.layer_1.likelihood.variance[:] = 0.01 * m.layer_1.Xs_flat[0].var()
print m.layer_0.Y.var()
print m

plt.plot(data_in_train, 'x-')

In [104]:
# optimization
# Staged schedule: 200 iterations with variances fixed, then release the
# kernel variances and likelihoods for the long final run.
m.optimize('bfgs',messages=1,max_iters=200)
for i in range(m.nLayers):
    #m.layers[i].kern.variance.constrain_bounded(0.06, 100, warning=False)
    m.layers[i].kern.variance.constrain_positive(warning=False)
#m.optimize('bfgs',messages=1,max_iters=500)
for i in range(m.nLayers):
    m.layers[i].likelihood.constrain_positive(warning=False)
m.optimize('bfgs',messages=1,max_iters=10000)


---------------------------------------------------------------------------
KeyboardInterrupt                         Traceback (most recent call last)
<ipython-input-104-01d018ca9eaa> in <module>()
      7 for i in range(m.nLayers):
      8     m.layers[i].likelihood.constrain_positive(warning=False)
----> 9 m.optimize('bfgs',messages=1,max_iters=10000)

/home/andreas/SoftwareNotDrpBox/GPy/GPy/core/model.pyc in optimize(self, optimizer, start, messages, max_iters, ipython_notebook, clear_after_finish, **kwargs)
    259 
    260         with VerboseOptimization(self, opt, maxiters=max_iters, verbose=messages, ipython_notebook=ipython_notebook, clear_after_finish=clear_after_finish) as vo:
--> 261             opt.run(f_fp=self._objective_grads, f=self._objective, fp=self._grads)
    262             vo.finish(opt)
    263 

/home/andreas/SoftwareNotDrpBox/GPy/GPy/inference/optimization/optimization.pyc in run(self, **kwargs)
     50     def run(self, **kwargs):
     51         start = dt.datetime.now()
---> 52         self.opt(**kwargs)
     53         end = dt.datetime.now()
     54         self.time = str(end - start)

/home/andreas/SoftwareNotDrpBox/GPy/GPy/inference/optimization/optimization.pyc in opt(self, f_fp, f, fp)
    125 
    126         opt_result = optimize.fmin_l_bfgs_b(f_fp, self.x_init, iprint=iprint,
--> 127                                             maxfun=self.max_iters, **opt_dict)
    128         self.x_opt = opt_result[0]
    129         self.f_opt = f_fp(self.x_opt)[0]

/home/andreas/anaconda/lib/python2.7/site-packages/scipy/optimize/lbfgsb.pyc in fmin_l_bfgs_b(func, x0, fprime, args, approx_grad, bounds, m, factr, pgtol, epsilon, iprint, maxfun, maxiter, disp, callback)
    186 
    187     res = _minimize_lbfgsb(fun, x0, args=args, jac=jac, bounds=bounds,
--> 188                            **opts)
    189     d = {'grad': res['jac'],
    190          'task': res['message'],

/home/andreas/anaconda/lib/python2.7/site-packages/scipy/optimize/lbfgsb.pyc in _minimize_lbfgsb(fun, x0, args, jac, bounds, disp, maxcor, ftol, gtol, eps, maxfun, maxiter, iprint, callback, **unknown_options)
    318                 # minimization routine wants f and g at the current x
    319                 # Overwrite f and g:
--> 320                 f, g = func_and_grad(x)
    321         elif task_str.startswith(b'NEW_X'):
    322             # new iteration

/home/andreas/anaconda/lib/python2.7/site-packages/scipy/optimize/lbfgsb.pyc in func_and_grad(x)
    269     else:
    270         def func_and_grad(x):
--> 271             f = fun(x, *args)
    272             g = jac(x, *args)
    273             return f, g

/home/andreas/anaconda/lib/python2.7/site-packages/scipy/optimize/optimize.pyc in function_wrapper(*wrapper_args)
    283     def function_wrapper(*wrapper_args):
    284         ncalls[0] += 1
--> 285         return function(*(wrapper_args + args))
    286 
    287     return ncalls, function_wrapper

/home/andreas/anaconda/lib/python2.7/site-packages/scipy/optimize/optimize.pyc in __call__(self, x, *args)
     61     def __call__(self, x, *args):
     62         self.x = numpy.asarray(x).copy()
---> 63         fg = self.fun(x, *args)
     64         self.jac = fg[1]
     65         return fg[0]

/home/andreas/SoftwareNotDrpBox/GPy/GPy/core/model.pyc in _objective_grads(self, x)
    204     def _objective_grads(self, x):
    205         try:
--> 206             self.optimizer_array = x
    207             obj_f, self.obj_grads = self.objective_function(), self._transform_gradients(self.objective_function_gradients())
    208             self._fail_count = 0

/home/andreas/SoftwareNotDrpBox/GPy/GPy/core/parameterization/parameterized.pyc in __setattr__(self, name, val)
    317             except AttributeError as a:
    318                 raise
--> 319         return object.__setattr__(self, name, val);
    320 
    321     #===========================================================================

/home/andreas/SoftwareNotDrpBox/GPy/GPy/core/parameterization/parameter_core.pyc in optimizer_array(self, p)
    695 
    696         self._optimizer_copy_transformed = False
--> 697         self.trigger_update()
    698 
    699     def _get_params_transformed(self):

/home/andreas/SoftwareNotDrpBox/GPy/GPy/core/parameterization/updateable.pyc in trigger_update(self, trigger_parent)
     52             #print "Warning: updates are off, updating the model will do nothing"
     53             return
---> 54         self._trigger_params_changed(trigger_parent)

/home/andreas/SoftwareNotDrpBox/GPy/GPy/core/parameterization/parameter_core.pyc in _trigger_params_changed(self, trigger_parent)
    710         If trigger_parent is True, we will tell the parent, otherwise not.
    711         """
--> 712         [p._trigger_params_changed(trigger_parent=False) for p in self.parameters if not p.is_fixed]
    713         self.notify_observers(None, None if trigger_parent else -np.inf)
    714 

/home/andreas/SoftwareNotDrpBox/GPy/GPy/core/parameterization/parameter_core.pyc in _trigger_params_changed(self, trigger_parent)
    711         """
    712         [p._trigger_params_changed(trigger_parent=False) for p in self.parameters if not p.is_fixed]
--> 713         self.notify_observers(None, None if trigger_parent else -np.inf)
    714 
    715     def _size_transformed(self):

/home/andreas/SoftwareNotDrpBox/GPy/GPy/core/parameterization/observable.pyc in notify_observers(self, which, min_priority)
     65                     if p <= min_priority:
     66                         break
---> 67                     callble(self, which=which)
     68 
     69     def change_priority(self, observer, callble, priority):

/home/andreas/SoftwareNotDrpBox/GPy/GPy/core/parameterization/parameter_core.pyc in _parameters_changed_notification(self, me, which)
   1041         """
   1042         self._optimizer_copy_transformed = False # tells the optimizer array to update on next request
-> 1043         self.parameters_changed()
   1044     def _pass_through_notify_observers(self, me, which=None):
   1045         self.notify_observers(which=which)

/home/andreas/Dropbox/_PhD/Software/github/Autoreg/autoreg/layers.pyc in parameters_changed(self)
    187     def parameters_changed(self):
    188         self._update_X()
--> 189         super(Layer,self).parameters_changed()
    190         self._update_qX_gradients()
    191         self._prepare_gradients()

/home/andreas/SoftwareNotDrpBox/GPy/GPy/core/sparse_gp.pyc in parameters_changed(self)
     77 
     78     def parameters_changed(self):
---> 79         self.posterior, self._log_marginal_likelihood, self.grad_dict = self.inference_method.inference(self.kern, self.X, self.Z, self.likelihood, self.Y, self.Y_metadata)
     80         self._update_gradients()
     81 

/home/andreas/Dropbox/_PhD/Software/github/Autoreg/autoreg/inference/vardtc.pyc in inference(self, kern, X, Z, likelihood, Y, Y_metadata, Lm, dL_dKmm)
    102         beta = 1./np.fmax(likelihood.variance, 1e-6)
    103 
--> 104         psi0, psi2, YRY, psi1, psi1Y, Shalf, psi1S = self.gatherPsiStat(kern, X, Z, Y, beta, uncertain_inputs)
    105 
    106         #======================================================================

/home/andreas/Dropbox/_PhD/Software/github/Autoreg/autoreg/inference/vardtc.pyc in gatherPsiStat(self, kern, X, Z, Y, beta, uncertain_inputs)
     59             psi0 = kern.psi0(Z, X)
     60             psi1 = kern.psi1(Z, X)*beta
---> 61             psi2 = kern.psi2(Z, X)*beta
     62         else:
     63             psi0 = kern.Kdiag(X)

/home/andreas/SoftwareNotDrpBox/GPy/GPy/kern/src/kernel_slice_operations.pyc in wrap(self, Z, variational_posterior)
    138     def wrap(self, Z, variational_posterior):
    139         with _Slice_wrap(self, Z, variational_posterior) as s:
--> 140             ret = f(self, s.X, s.X2)
    141         return ret
    142     return wrap

/home/andreas/SoftwareNotDrpBox/GPy/GPy/kern/src/rbf.pyc in psi2(self, Z, variational_posterior)
     69 
     70     def psi2(self, Z, variational_posterior):
---> 71         return self.psicomp.psicomputations(self, Z, variational_posterior, return_psi2_n=False)[2]
     72 
     73     def psi2n(self, Z, variational_posterior):

/home/andreas/SoftwareNotDrpBox/GPy/GPy/util/caching.pyc in __call__(self, *args, **kwargs)
    182         except KeyError:
    183             cacher = caches[self.f] = Cacher(self.f, self.limit, self.ignore_args, self.force_kwargs)
--> 184         return cacher(*args, **kwargs)
    185 
    186 class Cache_this(object):

/home/andreas/SoftwareNotDrpBox/GPy/GPy/util/caching.pyc in __call__(self, *args, **kw)
    118             # 3: This is when we never saw this chache_id:
    119             self.ensure_cache_length(cache_id)
--> 120             self.add_to_cache(cache_id, inputs, self.operation(*args, **kw))
    121         except:
    122             self.reset()

/home/andreas/SoftwareNotDrpBox/GPy/GPy/kern/src/psi_comp/__init__.pyc in psicomputations(self, kern, Z, variational_posterior, return_psi2_n)
     26         variance, lengthscale = kern.variance, kern.lengthscale
     27         if isinstance(variational_posterior, variational.NormalPosterior):
---> 28             return rbf_psi_comp.psicomputations(variance, lengthscale, Z, variational_posterior, return_psi2_n=return_psi2_n)
     29         elif isinstance(variational_posterior, variational.SpikeAndSlabPosterior):
     30             return ssrbf_psi_comp.psicomputations(variance, lengthscale, Z, variational_posterior)

/home/andreas/SoftwareNotDrpBox/GPy/GPy/kern/src/psi_comp/rbf_psi_comp.pyc in psicomputations(variance, lengthscale, Z, variational_posterior, return_psi2_n)
     17     psi1 = _psi1computations(variance, lengthscale, Z, mu, S)
     18     psi2 = _psi2computations(variance, lengthscale, Z, mu, S)
---> 19     if not return_psi2_n: psi2 = psi2.sum(axis=0)
     20     return psi0, psi1, psi2
     21 

/home/andreas/anaconda/lib/python2.7/site-packages/numpy/core/_methods.pyc in _sum(a, axis, dtype, out, keepdims)
     30 
     31 def _sum(a, axis=None, dtype=None, out=None, keepdims=False):
---> 32     return umr_sum(a, axis, dtype, out, keepdims)
     33 
     34 def _prod(a, axis=None, dtype=None, out=None, keepdims=False):

KeyboardInterrupt: 

In [ ]:
print m.layer_0.Y.var()

print m

In [ ]:
m.save('walk_run_2.h5')

In [ ]:
# for i in range(m.nLayers):
#     m.layers[i].likelihood.constrain_positive(warning=False)
m.optimize('bfgs',messages=1,max_iters=100000)
print m

In [ ]:


In [ ]:
#b = data_in_train.copy()
#b[:] = data_in_train.mean()
#pd = m.freerun(U=b, m_match=False)

# Test on training data
# NOTE(review): ts_inp comes from data_in_train_list, which holds the
# un-normalized controls — confirm freerun expects that scale.
ts_inp = data_in_train_list[0].copy()   # data_in_test_list[0]
ts_out = data_out_train_list[0].copy()  # data_out_test_list[0]

# Free-running (open-loop) prediction driven only by the control signal.
pd = m.freerun(U=ts_inp, m_match=False)

# De-normalized mean of the training outputs, kept for reference.
mean_pred = m.layer_0.Y.mean(0)*data_out_std+data_out_mean

In [ ]:
print pd[0,:]
print pd[0,:]*data_out_std+data_out_mean
print ts_out[0,:]
mean_pred.shape

In [ ]:
# De-normalize the predictions in place.
# NOTE: re-running this cell applies the transform a second time.
for i in range(pd.shape[0]):
    pd[i,:] = pd[i,:]*data_out_std+data_out_mean

In [ ]:
#for i in range(1):
#    plt.figure()
#    plt.plot(ts_out[:,i])
#    plt.figure()
#    plt.plot(pd[:,i])

print pd[0:50:3,28]
print mean_pred[28]

In [ ]:
_=plot(pd[:,0])
_=plot(pd[:,1])
_=plot(pd[:,4])
#_=plot(data_out_train[win_out:100,0],'r')
#_=plot(data_out_train[win_out:100,1],'y')

In [ ]:
!rm imgs/*.png
gen_frames(pd,data_out_mean, data_out_std, data['skel'],'./imgs')

In [ ]:
! avconv -y -r 10 -qscale 2 -i ./imgs/%05d.png pred_walk_run_mid.mp4

In [ ]:
HTML("""
<video width="480" height="480" controls>
  <source src="pred_walk_run.mp4" type="video/mp4">
</video>
""")

In [ ]:
HTML("""
<video width="480" height="480" controls>
  <source src="pred_walk_run.mp4" type="video/mp4">
</video>
""")

In [ ]:
HTML("""
<video width="480" height="480" controls>
  <source src="pred_walk_run_mid.mp4" type="video/mp4">
</video>
""")

In [ ]:
m.layer_1.X.mean.values

In [ ]:
m.layer_1.Us_flat[0].variance

In [ ]:
m.layer_1.kern.lengthscale

In [ ]:
m.layer_0.kern.lengthscale

In [ ]:
m.layer_1.X.mean.std(0)

In [ ]:
plot(data_in_train_list[0])
plot(data_in_train_list[6])

In [ ]:
pd = m.freerun(U=np.vstack([data_in_train_list[0],data_in_train_list[5],data_in_train_list[6],data_in_train_list[7]]),m_match=False)

In [ ]: