In [1]:
import numpy as np
import matplotlib.pyplot as plt
import pickle as pkl
%matplotlib inline
In [23]:
losses_v = pkl.load(open('losses/lossv.pkl', 'rb'))  # per-batch losses from the end-to-end (E2E) training run
In [45]:
loss = pkl.load(open('losses/3s_18b_30l_1024h_19d_512z_512zm_defdef4.pkl', 'rb'))  # losses from the defdef4 run (plotted near the end)
In [24]:
losses_e = pkl.load(open('losses/losse.pkl', 'rb'))  # per-batch losses from the W2V-embeddings run
In [40]:
losses = pkl.load(open('losses/loss.pkl', 'rb'))  # per-batch losses from the VAE2Def-embeddings run
In [7]:
ctc = pkl.load(open('losses/1s_500b_30l_1024h_19d_512z_512zm_ctcclipvarvaedef.pkl', 'rb'))  # losses from the CTC run
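For context: the pickles above are presumably flat Python lists of per-batch loss values appended and dumped during training. A minimal sketch, assuming that format (the file name and loop below are hypothetical):

In [ ]:
# Hypothetical sketch -- not from the actual training code.
import pickle as pkl

example_losses = []
for batch_loss in [2.31, 2.27, 2.19]:  # stand-in for real per-batch loss values
    example_losses.append(float(batch_loss))

with open('losses/example.pkl', 'wb') as f:  # hypothetical path
    pkl.dump(example_losses, f)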
In [111]:
len(losses_e)
Out[111]:
In [29]:
total_batch = 158195 / (128 / 2)  # approx. batches per epoch: training-example count / effective batch size
In [30]:
max([x / float(total_batch) for x in range(len(losses_e))])  # epochs covered by the logged losses
Out[30]:
In [31]:
min(losses_e)  # best (lowest) logged loss
Out[31]:
In [32]:
def moving_average(a, n=15):
    # Smooth a 1-D sequence with a simple length-n moving average.
    ret = np.cumsum(a, dtype=float)
    ret[n:] = ret[n:] - ret[:-n]
    return ret[n - 1:] / n
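A quick sanity check of the smoothing on a toy sequence (illustrative values only):

In [ ]:
# Length-3 moving average of [1, 2, 3, 4, 5] -> array([2., 3., 4.])
moving_average([1, 2, 3, 4, 5], n=3)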
In [33]:
smooth_loss_e = moving_average(losses_e)
In [34]:
smooth_loss_v = moving_average(losses_v)
In [38]:
smooth_loss = moving_average(losses)
In [39]:
# x-axis converts batch index to epochs (the VAE2Def run was presumably logged every 2nd batch, hence x*2)
plt.plot([x / float(total_batch) for x in range(len(smooth_loss_e))], smooth_loss_e, label='W2V Embeddings')
plt.plot([x / float(total_batch) for x in range(len(smooth_loss_v))], smooth_loss_v, label='E2E Training')
plt.plot([x * 2 / float(total_batch) for x in range(len(smooth_loss))], smooth_loss, label='VAE2Def Embeddings')
plt.title('Flickr-30k Caption Reconstruction Loss')
plt.xlabel('epochs')
plt.ylabel('Softmax Cross-Entropy Loss')
plt.legend()
Out[39]:
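Since %matplotlib inline finalizes the figure at the end of each cell, persisting the comparison plot means re-issuing the plot calls and saving in one cell. A minimal sketch (the output file name is hypothetical):

In [ ]:
# Rebuild the comparison plot and save it to disk (file name is hypothetical).
for curve, step, name in [(smooth_loss_e, 1, 'W2V Embeddings'),
                          (smooth_loss_v, 1, 'E2E Training'),
                          (smooth_loss, 2, 'VAE2Def Embeddings')]:
    plt.plot([x * step / float(total_batch) for x in range(len(curve))], curve, label=name)
plt.title('Flickr-30k Caption Reconstruction Loss')
plt.xlabel('epochs')
plt.ylabel('Softmax Cross-Entropy Loss')
plt.legend()
plt.savefig('flickr30k_reconstruction_loss.png', dpi=150, bbox_inches='tight')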
In [123]:
smooth_ctc = moving_average(ctc)  # smoothed CTC losses (the raw list is plotted below)
In [126]:
plt.plot(ctc)  # raw (unsmoothed) losses from the CTC run
Out[126]:
In [125]:
ctc
Out[125]:
In [42]:
smooth_loss = moving_average(loss, 3)  # shorter window (n=3) for this run
In [43]:
plt.plot(smooth_loss)
Out[43]:
In [46]:
plt.plot(loss)
Out[46]: