In [1]:
import mutils.io as mio
dat = mio.mload('tmp.dat')
sdt_kin_r = dat['sdt_kin_r']
sdt_kin_r_noIC = dat['sdt_kin_r_noIC']
sdt_param_r = dat['sdt_param_r']
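# quick sanity check of what was loaded (a sketch; the exact shapes depend on
# the dataset -- later cells imply 5 parameters and 41 reordered features)
print sdt_kin_r.shape, sdt_kin_r_noIC.shape, sdt_param_r.shape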
In [2]:
# build the reordered kinematic state: the three CoM-related columns first,
# followed by the CoM-free features
s_kin_reord_r = hstack([sdt_kin_r[:, [0,20,21]], sdt_kin_r_noIC])
p1 = dot(sdt_param_r.T, pinv(s_kin_reord_r.T)) # predictor matrix: maps kinematic state to parameters
# alternatively, compute the predictor using the lstsq function
#p2, _, _, _ = lstsq(s_kin_reord_r, sdt_param_r)
#p2 = p2.T
#allclose(p1, p2) # results in "True"
u,s,v = svd(p1, full_matrices = False)
# the rows of v span the space where our features live
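# sanity-check sketch: u, s, v should reconstruct p1, and the rows of v are
# orthonormal by construction of the SVD
print allclose(p1, dot(u * s, v))
print allclose(dot(v, v.T), eye(v.shape[0]))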
In [7]:
qv,rv = qr(v) # rv spans the same subspace as v
# TEST:
#prv = dot(rv.T, rv) # projector on "rv" subspace
#print allclose(prv, dot(prv, prv)) # True -> is a projector
#print allclose(dot(prv, v.T), v.T) # True -> projects into the same subspace as v
rv2 = rv.copy()
# make the leading 3x3 block diagonal (an "identity" up to scaling) by row elimination
rv2[1,:] -= rv2[2, :] * rv2[1,2] / rv2[2,2]
rv2[0,:] -= rv2[1, :] * rv2[0,1] / rv2[1,1]
rv2[0,:] -= rv2[2, :] * rv2[0,2] / rv2[2,2]
# projector onto the span of the last two rows, and projector onto its orthogonal complement
p2 = dot(rv2[3:,:].T, rv2[3:, :])
ap = eye(41) - p2
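# consistency-check sketch (assumes the rows of rv are orthonormal, which holds
# because rv = dot(qv.T, v) and v has orthonormal rows): p2 is idempotent and
# ap annihilates the last two rows of rv2
print allclose(dot(p2, p2), p2)
print allclose(dot(rv2[3:, :], ap), 0)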
# can we remove anything from the first 3 components?
rv3 = rv2.copy()
rv3[:3,:] = dot(ap,rv3[:3,:].T).T
print "are data unchanged?", allclose(rv2, rv3)
# visualize factors
figure()
pcolor(rv2)
colorbar()
clim(-1,1)
In [50]:
# this variant does not incorporate bootstrap!
#dat = mio.mload('tmp.dat')
#sdt_kin_r = dat['sdt_kin_r']
#sdt_kin_r_noIC = dat['sdt_kin_r_noIC']
#sdt_param_r = dat['sdt_param_r']
#s_kin_reord_r
s_kin_CoM = s_kin_reord_r[:, :3]   # CoM-related state (first three columns)
s_kin_noCoM = s_kin_reord_r[:, 3:] # remaining kinematic features
p_c = dot(sdt_param_r.T, pinv(s_kin_CoM.T)) # predictor matrix for CoM state
prediction_c = dot(p_c, s_kin_CoM.T).T
remainder = sdt_param_r - prediction_c
# alternative: compute the remainder via the annihilator of the column space of p_c,
# i.e. project onto its orthogonal complement:
#M = dot(p_c, pinv(p_c)) # projector on columnspace of p_c
#remainder2 = dot((eye(5) - M), sdt_param_r.T).T
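# minimal check (sketch): p_c is the least-squares fit, so the remainder should
# be orthogonal to the CoM regressors (up to numerical tolerance, assuming the
# CoM columns are linearly independent)
print allclose(dot(s_kin_CoM.T, remainder), 0)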
In [59]:
# TODO: use constrained least squares!
# Here: use the version from Wikipedia :)
Y = sdt_param_r
X = s_kin_reord_r
print Y.shape, X.shape
beta = dot(pinv(X), Y)
# formulate constraint in this syntax:
# H0: Q.T beta = c
Q = vstack([zeros((3, 3)), ones((38, 3))])
print dot(Q.T, beta)
betaC = 0 # placeholder; the constrained estimate is computed in the sketch below
#beta1 = hstack([eye(3), zeros((3, 38))])
#beta2 =
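# a minimal sketch of the constrained estimator (general textbook form, as on
# Wikipedia, for minimizing ||Y - X beta||^2 subject to Q.T beta = c; pinv is
# used in place of explicit inverses as a hedge against rank deficiency -- the
# Q above has three identical columns, so Q.T (X.T X)^+ Q is singular):
#   betaC = beta - (X.T X)^+ Q (Q.T (X.T X)^+ Q)^+ (Q.T beta - c)
c = zeros((Q.shape[1], Y.shape[1])) # assumed H0: the constrained combinations vanish
XtXi = pinv(dot(X.T, X))
betaC = beta - dot(dot(XtXi, Q), dot(pinv(dot(dot(Q.T, XtXi), Q)), dot(Q.T, beta) - c))
print allclose(dot(Q.T, betaC), c) # the constraint holds up to numerical tolerance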
In [51]:
import mutils.statistics as st
figure(12, figsize=(16,8))
clf()
fac_vars = st.find_factors(s_kin_noCoM.T, remainder.T)
facs = st.find_factors(s_kin_noCoM.T, remainder.T, 2)
subplot(2,1,1)
plot(fac_vars, 'kd--')
title('how much of the predictable variance\ncan be predicted using k factors')
xlabel('# of factors')
ylabel('relative predictable variance')
ylim(0,1)
gca().set_xticks(arange(5))
gca().set_xticklabels(arange(5) + 1)
subplot(2,1,2)
pcolor(facs.T), colorbar(), clim(-1,1)
noCoM_lbls = ['r_anl_y - com_y', 'r_anl_x - com_x', 'r_mtv_z - r_anl_z', 'r_mtv_x - r_anl_x', 'r_kne_y - com_y',
'r_elb_y - com_y', 'r_elb_x - com_x', 'r_wrl_z - com_z', 'r_wrl_x - com_x', 'cvii_y - com_y',
'l_anl_y - com_y', 'l_anl_x - com_x', 'l_mtv_z - l_anl_z', 'l_mtv_x - l_anl_x', 'l_kne_y - com_y',
'l_elb_y - com_y', 'l_elb_x - com_x', 'l_wrl_z - com_z', 'l_wrl_x - com_x', ]
all_lbls = noCoM_lbls + ['v_' + x for x in noCoM_lbls]
gca().set_yticks([.5, 1.5])
gca().set_xticks(arange(len(all_lbls)) + .5)
_ = gca().set_xticklabels(all_lbls, rotation=90)
title('weight on factors')
Out[51]:
[figure: relative predictable variance vs. number of factors (top); factor weights per kinematic feature (bottom)]