In [1]:
import matplotlib.pyplot as plt
import pickle as pkl
import numpy as np
%matplotlib inline

In [23]:
losses_v=pkl.load(open('losses/lossv.pkl','rb'))

In [45]:
loss=pkl.load(open('losses/3s_18b_30l_1024h_19d_512z_512zm_defdef4.pkl','rb'))

In [24]:
losses_e=pkl.load(open('losses/losse.pkl','rb'))

In [40]:
losses=pkl.load(open('losses/loss.pkl','rb'))

In [7]:
ctc=pkl.load(open('losses/1s_500b_30l_1024h_19d_512z_512zm_ctcclipvarvaedef.pkl','rb'))
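
The bare open(...) calls above never close their file handles; as a sketch over one of the same paths, a context-manager version is tidier:

In [ ]:
# Sketch: same pickle load, but the file handle is closed deterministically.
with open('losses/losse.pkl', 'rb') as f:
    losses_e = pkl.load(f)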

In [111]:
len(losses_e)


Out[111]:
62075

In [29]:
# Batches per epoch: dataset size divided by the effective batch size of
# 128/2 = 64 (Python 2 integer division, so this evaluates to 2471).
total_batch=(158195/(128/2))

In [30]:
# Number of epochs spanned by the recorded losses.
max([x/float(total_batch) for x in range(len(losses_e))])


Out[30]:
25.121003642250102
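
As a cross-check (a sketch, assuming the batch-size comment above): with Python 2 integer division total_batch is 2471, and the last loss index maps to the ~25.12 epochs reported above.

In [ ]:
# Sketch: reproduce Out[30] from the raw numbers.
print(158195 // 64)            # 2471 batches per epoch
print((62075 - 1) / 2471.0)    # ~25.12 epochs, matching Out[30]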

In [31]:
min(losses_e)


Out[31]:
2.2984068

In [32]:
def moving_average(a, n=15):
    # n-point running mean via cumulative sums: after the subtraction,
    # ret[i] holds the sum of the n values ending at i, so dividing by n
    # gives the window average.
    ret = np.cumsum(a, dtype=float)
    ret[n:] = ret[n:] - ret[:-n]
    return ret[n - 1:] / n
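
A quick way to sanity-check the cumulative-sum formulation (a sketch; the comparison against np.convolve is not in the original notebook):

In [ ]:
# The running mean should equal a 'valid' convolution with a uniform kernel.
a = np.arange(10, dtype=float)
n = 3
assert np.allclose(moving_average(a, n), np.convolve(a, np.ones(n) / n, mode='valid'))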

In [33]:
smooth_loss_e=moving_average(losses_e)

In [34]:
smooth_loss_v=moving_average(losses_v)

In [38]:
smooth_loss=moving_average(losses)

In [39]:
plt.plot([x/float(total_batch) for x in range(len(smooth_loss_e))],smooth_loss_e,label='W2V Embeddings')
plt.plot([x/float(total_batch) for x in range(len(smooth_loss_v))],smooth_loss_v,label='E2E Training')
# Factor of 2: this run appears to log half as often, so each index covers two batches.
plt.plot([x*2/float(total_batch) for x in range(len(smooth_loss))],smooth_loss,label='VAE2Def Embeddings')
plt.title('Flickr-30k Caption Reconstruction Loss')
plt.xlabel('Epochs')
plt.ylabel('Softmax Cross Entropy Loss')
plt.legend()


Out[39]:
<matplotlib.legend.Legend at 0x7f6538a05650>
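
To export the figure, a plt.savefig call appended to the plotting cell would work (a sketch; the filename is hypothetical, and with %matplotlib inline it must run in the same cell as the plot calls):

In [ ]:
# Appended to the plotting cell above, this writes the figure to disk.
plt.savefig('caption_reconstruction_loss.png', dpi=200, bbox_inches='tight')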

In [123]:
# With only six recorded CTC values, the default 15-point window returns an
# empty array, so the raw values are plotted below instead.
smooth_ctc=moving_average(ctc)

In [126]:
plt.plot(ctc)


Out[126]:
[<matplotlib.lines.Line2D at 0x7ff84bb88d50>]

In [125]:
ctc


Out[125]:
[-0.68619465827941895,
 -0.5740787422223268,
 -0.46343051827569476,
 -0.44442209447493575,
 -0.44236837467397266,
 -0.44231022432656375]

In [42]:
# Re-smooth this run's loss with a shorter 3-point window.
smooth_loss=moving_average(loss,3)

In [43]:
plt.plot(smooth_loss)


Out[43]:
[<matplotlib.lines.Line2D at 0x7f6537daded0>]

In [46]:
plt.plot(loss)


Out[46]:
[<matplotlib.lines.Line2D at 0x7f653887e5d0>]
