In [1]:
%pylab inline


Populating the interactive namespace from numpy and matplotlib

In [35]:
import tensorflow as tf
import numpy as np

from tensorflow_hmm import hmm

In [71]:
# truth: square wave that alternates between 0 and 1 every 10 samples
x = np.arange(0, 100, 1)
truth = np.array((x // 10) % 2, dtype=float)
# y: noisy observations of truth, rescaled to lie in [0, 1]
y = (truth + np.random.random(truth.shape) * 2.5)
y /= np.max(y)

In [72]:
y.shape


Out[72]:
(100,)

In [73]:
plot(truth, label='truth')
plot(y, label='y')
legend()


Out[73]:
<matplotlib.legend.Legend at 0x7f2389fb2310>

In [74]:
# 2-state HMM with a "sticky" transition matrix: each state persists with
# probability 0.9 and switches with probability 0.1.
# (An uninformative 0.5/0.5 matrix is left commented out for comparison.)
# hmm_tf = hmm.HMMTensorflow(np.array([[0.5, 0.5], [0.5, 0.5]]))
hmm_tf = hmm.HMMTensorflow(np.array([[0.9, 0.1], [0.1, 0.9]]))
hmm_np = hmm.HMMNumpy(np.array([[0.9, 0.1], [0.1, 0.9]]))

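A quick back-of-the-envelope check (added here, not part of the original notebook) on why 0.9 is a sensible self-transition probability for this signal: with geometric dwell times, the expected number of steps spent in a state is 1 / (1 - p_stay), so 0.9 gives roughly 10 steps, matching the 10-sample half-period of the square wave above.

p_stay = 0.9                        # self-transition probability used above
expected_dwell = 1.0 / (1.0 - p_stay)
print(expected_dwell)               # roughly 10 steps per state, on average
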
In [91]:
np.vstack([1 - y, y]).T.shape


Out[91]:
(100, 2)

In [97]:
def lik(y):
    """ Given a 1-D vector of likelihoods of length N, return a matrix
    with shape (N, 2) where column 0 is 1 - y and column 1 is y.

    This makes it easy to convert a time series of probabilities
    into 2 states, off/on, for a simple HMM.
    """

    return np.vstack([1 - y, y]).T

In [93]:
lik(y).shape


Out[93]:
(100, 2)

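As a quick illustration (a toy vector added here, not from the notebook run), lik turns a series of "on" probabilities into per-state emission likelihoods:

p = np.array([0.2, 0.9])
print(lik(p))
# [[ 0.8  0.2]
#  [ 0.1  0.9]]
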
In [95]:
# Viterbi decode with the numpy backend: most likely off/on state sequence
np_s, np_scores = hmm_np.viterbi_decode(lik(y))

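One simple way to quantify the decode (an added check, not in the original notebook) is the fraction of time steps where the Viterbi state matches the known truth:

print(np.mean(np_s == truth))       # decode accuracy against the known truth
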
In [96]:
plot(truth)
plot(np_s)


Out[96]:
[<matplotlib.lines.Line2D at 0x7f23894df610>]

In [78]:
tf_s_graph, tf_scores_graph = hmm_tf.viterbi_decode(lik(y), len(y))

sess = tf.Session()
tf_s = sess.run(tf_s_graph)
# keep the first scores entry as returned; evaluate the remaining graph nodes
tf_scores = [tf_scores_graph[0]]
tf_scores.extend([sess.run(g) for g in tf_scores_graph[1:]])
print(np.array(tf_scores))


[[ -1.19239089  -1.62706624]
 [ -1.47956684  -3.52672071]
 [ -1.80442684  -5.25622965]
 [ -2.70606219  -4.70667954]
 [ -3.28694184  -5.78374387]
 [ -3.75386324  -6.7821912 ]
 [ -4.29128001  -7.10390999]
 [ -5.16276227  -7.21900274]
 [ -6.27886972  -7.77683669]
 [ -7.02012423  -8.63607564]
 [ -7.5462175   -9.81019514]
 [ -8.71205383 -10.27389558]
 [ -9.36238671 -11.24641816]
 [-10.46161928 -11.8140375 ]
 [-10.97117073 -13.02056353]
 [-13.38637569 -13.23048121]
 [-16.7896302  -13.37350302]
 [-16.59684247 -13.98672444]
 [-17.91365049 -14.31153719]
 [-21.72572821 -14.42294234]
 [-17.67903138 -15.01506741]
 [-17.33744651 -19.05268449]
 [-17.72354878 -20.5654541 ]
 [-18.9142681  -20.43829217]
 [-19.06084353 -23.7531445 ]
 [-20.21090806 -21.79697833]
 [-21.04137105 -22.56452033]
 [-22.31176963 -23.04371763]
 [-23.21080183 -23.75088935]
 [-23.87683644 -24.7021384 ]
 [-24.60671097 -25.57433918]
 [-25.07438382 -26.87064088]
 [-26.26954486 -27.38590168]
 [-27.61845321 -27.83144439]
 [-28.57540926 -28.49320952]
 [-29.28204743 -29.3928885 ]
 [-31.20736284 -29.67502564]
 [-32.00591009 -30.4734938 ]
 [-32.74633168 -31.33367079]
 [-34.94041934 -31.5712432 ]
 [-34.21261579 -32.92360185]
 [-34.45611943 -35.07670396]
 [-34.97784186 -36.25923302]
 [-35.87613152 -36.96701806]
 [-36.71066853 -37.73074954]
 [-37.29970298 -38.79456273]
 [-38.01222243 -39.68715423]
 [-39.28640506 -40.16464093]
 [-40.32150281 -40.77196214]
 [-41.5402973  -41.27545849]
 [-42.07540927 -42.43255954]
 [-43.6691428  -42.79376712]
 [-45.48064125 -43.09948974]
 [-46.70676349 -43.52128437]
 [-46.56262683 -44.27617174]
 [-47.06927163 -45.32908432]
 [-47.57180944 -46.5498417 ]
 [-48.58103841 -47.17439592]
 [-49.93973895 -47.61599795]
 [-51.16591493 -48.06001142]
 [-51.12097877 -48.79728029]
 [-51.64651435 -49.76749352]
 [-52.13923917 -51.00868154]
 [-52.44285105 -52.82975016]
 [-53.1913498  -53.68089995]
 [-53.46859785 -55.63188841]
 [-53.85526862 -57.14290601]
 [-54.74691598 -56.76580189]
 [-56.02310339 -57.2423861 ]
 [-56.91817884 -57.95283565]
 [-59.00153542 -58.20709788]
 [-60.20586713 -58.7177441 ]
 [-61.71566542 -59.1048113 ]
 [-61.80307949 -60.32863997]
 [-62.58492433 -61.14409288]
 [-64.02466143 -61.55505817]
 [-65.0661071  -62.01517506]
 [-67.26189165 -62.17461988]
 [-65.61273506 -62.66748544]
 [-65.65768702 -63.47155459]
 [-66.77723419 -64.02742565]
 [-66.86237537 -65.01761405]
 [-67.00696263 -68.38092024]
 [-67.17698704 -71.25699083]
 [-67.80399478 -70.37984692]
 [-68.36931976 -71.10436834]
 [-68.75691529 -72.07472068]
 [-68.88313796 -74.93973202]
 [-69.65062138 -71.91088791]
 [-70.89712799 -72.33806441]
 [-71.78339253 -73.05589906]
 [-73.31457988 -73.43610398]
 [-73.83551969 -74.62015022]
 [-74.41978978 -75.69167078]
 [-75.18859642 -76.52078876]
 [        -inf -76.62614927]
 [-79.33019617 -77.83817709]
 [-80.24526032 -78.53230662]
 [-82.06641815 -78.83589894]
 [-82.8613648  -79.13794493]]

In [79]:
plot(truth)
plot(tf_s)


Out[79]:
[<matplotlib.lines.Line2D at 0x7f2389de2b50>]

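Since both backends implement the same Viterbi decoder, a quick consistency check (added here, not part of the original run) is to compare the two state sequences:

print(np.array_equal(np.asarray(tf_s).ravel(), np.asarray(np_s).ravel()))
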
In [80]:
plot(tf_scores)


Out[80]:
[<matplotlib.lines.Line2D at 0x7f238951bc50>,
 <matplotlib.lines.Line2D at 0x7f238951be50>]

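Each row of tf_scores holds the running log-scores of the best path ending in each of the two states; the per-step argmax (a small sketch, not from the original notebook) shows which state is leading at each point, which need not coincide with the final backtracked Viterbi path:

print(np.argmax(np.array(tf_scores), axis=1))
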
In [82]:
# forward_backward returns the posterior state estimates as graph nodes
g_posterior, _, _ = hmm_tf.forward_backward(lik(y))

# evaluate the graph and stack the per-step results into an (N, 2) array
tf_posterior = np.concatenate(tf.Session().run(g_posterior))

In [83]:
plot(tf_posterior[:,0])


Out[83]:
[<matplotlib.lines.Line2D at 0x7f237acaba90>]

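The posterior is a soft per-step state estimate; taking the argmax across the two columns (a small sketch added here, not in the notebook) gives a hard on/off sequence comparable to the Viterbi path:

posterior_states = np.argmax(tf_posterior, axis=1)   # most probable state per step
print(np.mean(posterior_states == truth))            # agreement with the known truth
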
In [ ]: