This example is taken from Section 12.5.3 of Machine Learning: A Probabilistic Perspective by Kevin Murphy.
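The code below builds a two-view linear-Gaussian latent factor model: a shared latent factor $z_s \in \mathbb{R}^{L_o}$ couples the observed views $x \in \mathbb{R}^{D_x}$ and $y \in \mathbb{R}^{D_y}$, while $z_x \in \mathbb{R}^{L_x}$ and $z_y \in \mathbb{R}^{L_y}$ capture view-specific variation. Matching the means and covariances defined in the cells that follow (with $\varepsilon_x, \varepsilon_y$ the isotropic observation noise):

$$
x = W_x z_s + B_x z_x + \mu_x + \varepsilon_x, \qquad
y = W_y z_s + B_y z_y + \mu_y + \varepsilon_y, \qquad
\varepsilon_x \sim \mathcal{N}(0, \sigma^2 I_{D_x}),\;
\varepsilon_y \sim \mathcal{N}(0, \sigma^2 I_{D_y}).
$$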
In [1]:
from symgp import *
from sympy import *
from IPython.display import display, Math, Latex
In [2]:
# Shapes
D_x, D_y, L_o, L_x, L_y = symbols('D_x D_y L_o L_x L_y')
# Variables
x, y, z_s, z_x, z_y = utils.variables('x y z_{s} z_{x} z_{y}', [D_x, D_y, L_o, L_x, L_y])
# Constants
B_x, W_x, mu_x, B_y, W_y, mu_y = utils.constants('B_{x} W_{x} mu_{x} B_{y} W_{y} mu_{y}',
[(D_x,L_x), (D_x,L_o), D_x, (D_y,L_y), (D_y,L_o), D_y])
sig = symbols('\u03c3') # Noise standard deviation
In [3]:
# p(z_s), p(z_x), p(z_y)
p_zs = MVG([z_s],mean=ZeroMatrix(L_o,1),cov=Identity(L_o))
p_zx = MVG([z_x],mean=ZeroMatrix(L_x,1),cov=Identity(L_x))
p_zy = MVG([z_y],mean=ZeroMatrix(L_y,1),cov=Identity(L_y))
display(Latex(utils.matLatex(p_zs)))
display(Latex(utils.matLatex(p_zx)))
display(Latex(utils.matLatex(p_zy)))
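Each latent factor gets an independent standard-normal prior, so the three displays above should render (up to symgp's exact LaTeX layout) as

$$
p(z_s) = \mathcal{N}(z_s \mid 0, I_{L_o}), \qquad
p(z_x) = \mathcal{N}(z_x \mid 0, I_{L_x}), \qquad
p(z_y) = \mathcal{N}(z_y \mid 0, I_{L_y}).
$$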
In [4]:
# p(z)
p_z = p_zs*p_zx*p_zy
display(Latex(utils.matLatex(p_z)))
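Multiplying independent MVGs stacks them into a single Gaussian over the concatenated vector $z = (z_s; z_x; z_y)$:

$$
p(z) = \mathcal{N}\!\left(z \mid 0,\; I_{L_o + L_x + L_y}\right).
$$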
In [5]:
# p(x|z)
p_x_g_z = MVG([x],mean=B_x*z_x + W_x*z_s + mu_x,cov=sig**2*Identity(D_x),cond_vars=[z_x,z_s])
display(Latex(utils.matLatex(p_x_g_z)))
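In distributional form, this cell defines the likelihood

$$
p(x \mid z_s, z_x) = \mathcal{N}\!\left(x \mid W_x z_s + B_x z_x + \mu_x,\; \sigma^2 I_{D_x}\right),
$$

and the next cell defines the analogous likelihood for $y$ in terms of $W_y$, $B_y$, and $\mu_y$.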
In [6]:
# p(y|z)
p_y_g_z = MVG([y],mean=B_y*z_y + W_y*z_s + mu_y,cov=sig**2*Identity(D_y),cond_vars=[z_y,z_s])
display(Latex(utils.matLatex(p_y_g_z)))
In [7]:
# p(v|z) = p(x,y|z_s,z_x,z_y), where we denote v = (x;y) and z = (z_s;z_x;z_y)
p_v_g_z = p_x_g_z*p_y_g_z
display(Latex(utils.matLatex(p_v_g_z)))
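Because $x$ and $y$ are conditionally independent given the latents (their noise terms are independent), the product has a stacked mean and an isotropic covariance:

$$
p(v \mid z) = \mathcal{N}\!\left(
\begin{pmatrix} x \\ y \end{pmatrix}
\,\middle|\,
\begin{pmatrix} W_x z_s + B_x z_x + \mu_x \\ W_y z_s + B_y z_y + \mu_y \end{pmatrix},
\; \sigma^2 I_{D_x + D_y}
\right).
$$

Equivalently, $v = F z + \mu + \varepsilon$ with loading matrix $F = \begin{pmatrix} W_x & B_x & 0 \\ W_y & 0 & B_y \end{pmatrix}$ and $\mu = (\mu_x; \mu_y)$.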
In [8]:
# p(v,z) = p(x,y,z_s,z_x,z_y)
p_v_z = p_v_g_z*p_z
display(Latex(utils.matLatex(p_v_z)))
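For a linear-Gaussian pair $z \sim \mathcal{N}(0, I)$ and $v \mid z \sim \mathcal{N}(Fz + \mu, \sigma^2 I)$, the standard result gives the joint Gaussian (symgp may order the blocks differently, but the content is the same):

$$
p(v, z) = \mathcal{N}\!\left(
\begin{pmatrix} v \\ z \end{pmatrix}
\,\middle|\,
\begin{pmatrix} \mu \\ 0 \end{pmatrix},
\begin{pmatrix} F F^\top + \sigma^2 I & F \\ F^\top & I \end{pmatrix}
\right).
$$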
In [9]:
# p(v) = p(x,y)
p_v = p_v_z.marginalise([z_s,z_x,z_y])
display(Latex(utils.matLatex(p_v)))
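Marginalising out all three latents leaves $p(v) = \mathcal{N}(v \mid \mu, F F^\top + \sigma^2 I)$. Expanding the blocks of $F F^\top$ recovers the covariance structure of the model in Murphy's Section 12.5.3, with the shared loadings $W_x, W_y$ alone carrying the cross-covariance between the two views:

$$
p(x, y) = \mathcal{N}\!\left(
\begin{pmatrix} x \\ y \end{pmatrix}
\,\middle|\,
\begin{pmatrix} \mu_x \\ \mu_y \end{pmatrix},
\begin{pmatrix}
W_x W_x^\top + B_x B_x^\top + \sigma^2 I_{D_x} & W_x W_y^\top \\
W_y W_x^\top & W_y W_y^\top + B_y B_y^\top + \sigma^2 I_{D_y}
\end{pmatrix}
\right).
$$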