Experiment 5: TRo Journal


Compare the predictive performance of MRD trained with various feature representations for the feature (kinect) space and the observation (mocap) space.

In this IPython notebook, the predictive performance of three different feature representations for each observation space is evaluated. The evaluation metrics are the RMS error, the normalized RMS error and the Pearson correlation.
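
The sketch below illustrates one possible implementation of these metrics; the exact normalization convention used in the actual evaluation may differ. yTrue and yPred are assumed to be arrays of shape (nSamples, nDims).

In [ ]:
# hedged sketch of the evaluation metrics (normalization convention is an assumption)
import numpy as np
from scipy.stats import pearsonr

def rmsError(yTrue, yPred):
    # root-mean-square error over all samples and dimensions
    return np.sqrt(np.mean((yTrue - yPred)**2))

def normRmsError(yTrue, yPred):
    # RMS error normalized by the range of the ground truth
    return rmsError(yTrue, yPred)/(yTrue.max() - yTrue.min())

def pearsonCorr(yTrue, yPred):
    # mean Pearson correlation coefficient across output dimensions
    return np.mean([pearsonr(yTrue[:,d], yPred[:,d])[0] for d in range(yTrue.shape[1])])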


In [ ]:
# import the modules
import os
import GPy
import csv
import numpy as np
import cPickle as pickle
from matplotlib import pyplot as plt

%matplotlib notebook
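
The training cells below assume that a dictionary Data (indexed first by trial name and then by feature key, each entry an array of shape (nSamples, nDims)) and a list names of trial identifiers are already in memory. A minimal loading sketch, with a hypothetical dataset path:

In [ ]:
# hypothetical loading step: the actual dataset path and file format may differ
Data = pickle.load(open('../Data/Data.p','rb'))
names = Data.keys()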

Model Training


Each MRD model is trained with a leave-one-out scheme over the recorded trials: one trial is held out for testing and the remaining trials are used for training, with an additional trial set aside for validation. Optimization proceeds in three phases: the variational distribution is first optimized with the kernel and noise variances fixed, each observation space is then optimized individually, and finally the full model is optimized without constraints.

In [ ]:
# set the dimensionality of the shared MRD latent space
qDim = 15

# latent dimensions initialized from the kinect and mocap observation spaces
qDims = [10,5]
qDVals = [np.arange(0,qDims[0]), np.arange(qDims[0],qDims[0]+qDims[1])]

# set the number of inducing inputs
nInducing = 100

# create directory for results
dName = '../Models/Exp5'
if not os.path.exists(dName):
    os.makedirs(dName)

In [ ]:
# cross-validation and modality settings
samplingFreq = 2   # subsampling factor for the training data in the point cloud experiments
nTrials = len(names)
kinectKeys = ['ESF','Color','Depth']
mocapKeys = ['Marker','CircleMarker','CircleParam']

# optimization variables
SNR0 = 1000
SNR1 = 100
trainIters = 1000
initMod0Iters = 500
initMod1Iters = 500
initVardistIters = 1000

In [ ]:
# loop over the kinect keys
kinectExts = ['E','CO','D']
kinectDims = [640,2500,2500]

for kinectKey, kinectExt, kinectDim in zip(kinectKeys, kinectExts, kinectDims):
    mocapKey = 'TopCoord'
    keys = [kinectKey,mocapKey]

    # model name
    expName = '%sT' % (kinectExt)

    # YNames variable
    YNames = [kinectKey, mocapKey]
    
    dims = [kinectDim, 8]

    print 'Modality: %s' % (kinectKey)
    print kinectKey, kinectExt, kinectDim
    
    for K in range(nTrials):
        valData = {}
        testData = {}
        trainData = {}

        testInd = K
        trainInd = range(nTrials)
        del trainInd[K]
        valInd = (K+1)%nTrials
    
        print 'Cycle:%d' % (K+1)
        print valInd, testInd, trainInd
    
        for key,dim in zip(keys,dims):
            vaD = np.empty((0,dim))
            trD = np.empty((0,dim))
            teD = np.empty((0,dim))
    
            for ind in trainInd:
                trD = np.concatenate((trD,Data[names[ind]][key]),axis=0)
        
            vaD = np.concatenate((vaD,Data[names[valInd]][key]),axis=0)
            teD = np.concatenate((teD,Data[names[testInd]][key]),axis=0)

            valData[key] = vaD
            testData[key] = teD
            trainData[key] = trD
        
        # choosing the training dataset
        nSamples = trainData[kinectKey].shape[0]
        trainList = [trainData[kinectKey], trainData[mocapKey]]
    
        # initializing the latent space 
        scales = []
        inputX = np.zeros((nSamples,qDim))

        for qD,qDV,Y in zip(qDims, qDVals, trainList):
            x,frcs = GPy.util.initialization.initialize_latent('PCA',qD, Y)
            scales.extend(frcs)
            inputX[:,qDV] = x
    
        scales = np.asarray(scales)
        print scales
    
        # setting up the kernel
        mrdKernels = []

        for Y in trainList:
            mrdKernels.append(GPy.kern.RBF(qDim, variance=1., lengthscale=1./scales, ARD = True))
        
        # initializing MRD model
        mrdModel = GPy.models.MRD(trainList, input_dim=qDim, num_inducing=nInducing, kernel=mrdKernels, 
                                  X=inputX, name='%s%d' % (expName,K))

        print 'Setup Model!'
    
        # Phase 1: optimize with the kernel and noise variances fixed
        var0 = mrdModel.Y0.Y.var()
        var1 = mrdModel.Y1.Y.var()

        mrdModel.Y0.rbf.variance.fix(var0)
        mrdModel.Y1.rbf.variance.fix(var1)

        mrdModel.Y0.Gaussian_noise.variance.fix(var0/SNR0)
        mrdModel.Y1.Gaussian_noise.variance.fix(var1/SNR1)

        mrdModel.optimize(messages=True, max_iters=initVardistIters)
    
        # Phase 2: Optimize each model individually

        # optimize observation space 0 by keeping space 1 fixed
        mrdModel.Y1.constrain_fixed()
        mrdModel.optimize(messages=True, max_iters=initMod0Iters)

        # optimize observation space 1 by keeping space 0 fixed
        mrdModel.Y0.constrain_fixed()
        mrdModel.Y1.unconstrain_fixed()
        mrdModel.Y1.rbf.variance.fix(var1)
        mrdModel.Y1.Gaussian_noise.variance.fix(var1/SNR1)
        mrdModel.optimize(messages=True, max_iters=initMod1Iters)
    
        # Phase 3: Optimize the model without any constraints

        # training without constraints
        mrdModel.Y0.unconstrain_fixed()
        mrdModel.Y1.unconstrain_fixed()
        mrdModel.optimize(messages=True, max_iters=trainIters)
    
        print 'Training Done!'
    
        # plot the learned model
        mrdModel.plot_scales(sharex=True,sharey=False,titles=YNames)
        mrdModel.plot_latent(which_indices=[0,1])
    
        # save the model
        pickle.dump(mrdModel, open('../Models/Exp5/%s%d.p' % (expName,K+1),'wb'))
    
        print 'Saving Done!'

In [ ]:
# loop over the mocap keys
mocapExts = ['M','CM','CP']
for mocapKey, mocapExt in zip(mocapKeys, mocapExts):
    kinectKey = 'Cloud'
    keys = [kinectKey,mocapKey]

    # model name
    expName = 'C%s' % (mocapExt)

    # YNames variable
    YNames = [kinectKey, mocapKey]

    for K in range(nTrials):
        valData = {}
        testData = {}
        trainData = {}

        testInd = K
        trainInd = range(nTrials)
        del trainInd[K]
        valInd = (K+1)%nTrials
    
        print 'Cycle:%d' % (K+1)
        print valInd, testInd, trainInd
    
        for key in keys:
            # feature dimensionality taken directly from the data (it differs across representations)
            dim = Data[names[0]][key].shape[1]
            vaD = np.empty((0,dim))
            trD = np.empty((0,dim))
            teD = np.empty((0,dim))
    
            for ind in trainInd:
                trD = np.concatenate((trD,Data[names[ind]][key][::samplingFreq,:]),axis=0)
        
            vaD = np.concatenate((vaD,Data[names[valInd]][key]),axis=0)
            teD = np.concatenate((teD,Data[names[testInd]][key]),axis=0)

            valData[key] = vaD
            testData[key] = teD
            trainData[key] = trD
        
        # choosing the training dataset
        nSamples = trainData[kinectKey].shape[0]
        trainList = [trainData[kinectKey], trainData[mocapKey]]
    
        # initializing the latent space 
        scales = []
        inputX = np.zeros((nSamples,qDim))

        for qD,qDV,Y in zip(qDims, qDVals, trainList):
            x,frcs = GPy.util.initialization.initialize_latent('PCA',qD, Y)
            scales.extend(frcs)
            inputX[:,qDV] = x
    
        scales = np.asarray(scales)
        print scales
    
        # setting up the kernel
        mrdKernels = []

        for Y in trainList:
            mrdKernels.append(GPy.kern.RBF(qDim, variance=1., lengthscale=1./scales, ARD = True))
        
        # initializing MRD model
        mrdModel = GPy.models.MRD(trainList, input_dim=qDim, num_inducing=nInducing, kernel=mrdKernels, 
                                  X=inputX, name='%s%d' % (expName,K))

        print 'Setup Model!'
    
        # Phase 1: optimize with the kernel and noise variances fixed
        var0 = mrdModel.Y0.Y.var()
        var1 = mrdModel.Y1.Y.var()

        mrdModel.Y0.rbf.variance.fix(var0)
        mrdModel.Y1.rbf.variance.fix(var1)

        mrdModel.Y0.Gaussian_noise.variance.fix(var0/SNR0)
        mrdModel.Y1.Gaussian_noise.variance.fix(var1/SNR1)

        mrdModel.optimize(messages=True, max_iters=initVardistIters)
    
        # Phase 2: Optimize each model individually

        # optimize observation space 0 by keeping space 1 fixed
        mrdModel.Y1.constrain_fixed()
        mrdModel.optimize(messages=True, max_iters=initMod0Iters)

        # optimize observation space 1 by keeping space 0 fixed
        mrdModel.Y0.constrain_fixed()
        mrdModel.Y1.unconstrain_fixed()
        mrdModel.Y1.rbf.variance.fix(var1)
        mrdModel.Y1.Gaussian_noise.variance.fix(var1/SNR1)
        mrdModel.optimize(messages=True, max_iters=initMod1Iters)
    
        # Phase 3: Optimize the model without any constraints

        # training without constraints
        mrdModel.Y0.unconstrain_fixed()
        mrdModel.Y1.unconstrain_fixed()
        mrdModel.optimize(messages=True, max_iters=trainIters)
    
        print 'Training Done!'
    
        # plot the learned model
        mrdModel.plot_scales(sharex=True,sharey=False,titles=YNames)
        mrdModel.plot_latent(which_indices=[0,1])
    
        # save the model
        pickle.dump(mrdModel, open('../Models/Exp5/%s%d.p' % (expName,K+1),'wb'))
    
        print 'Saving Done!'
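
The saved models are reloaded later for the prediction experiments. A rough sketch of that step, assuming GPy's infer_newX and MRD.predict interface; the exact inference procedure used for evaluation may differ.

In [ ]:
# hedged sketch: reload a saved model and predict the mocap space from kinect test data
mrdModel = pickle.load(open('../Models/Exp5/ET1.p','rb'))

# kinect features of the corresponding held-out trial
yTest = testData[kinectKey]

# infer latent points for the test observations (assumed GPy interface)
xPredict, infX = mrdModel.Y0.infer_newX(yTest, optimize=True)

# predict the corresponding mocap observations from the inferred latent means
mocapPredict = mrdModel.predict(xPredict.mean, Yindex=1)[0]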