In [1]:
from vahun.LogReader import LogReader
import pandas as pd  # LogReader.get_full_table() returns a pandas DataFrame

# Collect every logged experiment into one result table
log = LogReader()
table = log.get_full_table()

# Export the full table as a tab-separated file, then display it
table.to_csv('/mnt/store/velkey/result_full.tsv', sep='\t')
print(table)


                                      Experiment  Encoded_len Uniq_words  \
0       uniq_variational_top_uni__20170426181649           20     200000   
1       uniq_variational_top_uni__20170426181649           20     200000   
2       uniq_autoencoder_top_uni__20170426181705           20     200000   
3       uniq_autoencoder_top_uni__20170426181705           20     200000   
4       uniq_autoencoder_top_uni__20170426182211           40     200000   
5       uniq_autoencoder_top_uni__20170426182211           40     200000   
6       uniq_variational_top_uni__20170426182341           40     200000   
7       uniq_variational_top_uni__20170426182341           40     200000   
8       uniq_autoencoder_top_uni__20170426182714           60     200000   
9       uniq_autoencoder_top_uni__20170426182714           60     200000   
10      uniq_variational_top_uni__20170426182915           60     200000   
11      uniq_variational_top_uni__20170426182915           60     200000   
12      uniq_autoencoder_top_uni__20170426182925           20     200000   
13      uniq_autoencoder_top_uni__20170426182925           20     200000   
14      uniq_autoencoder_top_uni__20170426183223           80     200000   
15      uniq_autoencoder_top_uni__20170426183223           80     200000   
16      uniq_autoencoder_top_uni__20170426183350           20     200000   
17      uniq_autoencoder_top_uni__20170426183350           20     200000   
18      uniq_variational_top_uni__20170426183613           80     200000   
19      uniq_variational_top_uni__20170426183613           80     200000   
20      uniq_autoencoder_top_uni__20170426183722          100     200000   
21      uniq_autoencoder_top_uni__20170426183722          100     200000   
22      uniq_autoencoder_top_uni__20170426183820           20     200000   
23      uniq_autoencoder_top_uni__20170426183820           20     200000   
24      uniq_autoencoder_top_uni__20170426184232          120     200000   
25      uniq_autoencoder_top_uni__20170426184232          120     200000   
26      uniq_autoencoder_top_uni__20170426184251           20     200000   
27      uniq_autoencoder_top_uni__20170426184251           20     200000   
28      uniq_variational_top_uni__20170426184258          100     200000   
29      uniq_variational_top_uni__20170426184258          100     200000   
...                                          ...          ...        ...   
2173  uniq_autoencoder_segmented__20170510160419          540     273178   
2174  uniq_autoencoder_segmented__20170510160419          540     273178   
2175         uniq_autoencoder_CV__20170510160829          540     259159   
2176         uniq_autoencoder_CV__20170510160829          540     259159   
2177  uniq_variational_segmented__20170510161520          540     273178   
2178  uniq_variational_segmented__20170510161520          540     273178   
2179         uniq_variational_CV__20170510161931          540     259159   
2180         uniq_variational_CV__20170510161931          540     259159   
2181  uniq_autoencoder_segmented__20170510162954          560     273178   
2182  uniq_autoencoder_segmented__20170510162954          560     273178   
2183         uniq_autoencoder_CV__20170510163416          560     259159   
2184         uniq_autoencoder_CV__20170510163416          560     259159   
2185  uniq_variational_segmented__20170510164110          560     273178   
2186  uniq_variational_segmented__20170510164110          560     273178   
2187         uniq_variational_CV__20170510164542          560     259159   
2188         uniq_variational_CV__20170510164542          560     259159   
2189  uniq_autoencoder_segmented__20170510165548          580     273178   
2190  uniq_autoencoder_segmented__20170510165548          580     273178   
2191         uniq_autoencoder_CV__20170510170030          580     259159   
2192         uniq_autoencoder_CV__20170510170030          580     259159   
2193  uniq_variational_segmented__20170510170704          580     273178   
2194  uniq_variational_segmented__20170510170704          580     273178   
2195         uniq_variational_CV__20170510171129          580     259159   
2196         uniq_variational_CV__20170510171129          580     259159   
2197  uniq_autoencoder_segmented__20170510172219          600     273178   
2198  uniq_autoencoder_segmented__20170510172219          600     273178   
2199         uniq_autoencoder_CV__20170510172552          600     259159   
2200         uniq_autoencoder_CV__20170510172552          600     259159   
2201  uniq_variational_segmented__20170510173338          600     273178   
2202         uniq_variational_CV__20170510173725          600     259159   

     Variational  Uniq  Layernum  Train_char_acc  Valid_char_acc  \
0           True  True         2        0.699399        0.698318   
1           True  True         2        0.699329        0.698235   
2          False  True         2        0.696825        0.696345   
3          False  True         2        0.695728        0.695835   
4          False  True         2        0.766488        0.765912   
5          False  True         2        0.765310        0.764845   
6           True  True         2        0.766905        0.765960   
7           True  True         2        0.766304        0.765430   
8          False  True         2        0.815684        0.814975   
9          False  True         2        0.815565        0.815110   
10          True  True         2        0.812797        0.812333   
11          True  True         2        0.812692        0.812187   
12         False  True         4        0.793466        0.792432   
13         False  True         4        0.800906        0.799380   
14         False  True         2        0.848580        0.847882   
15         False  True         2        0.849468        0.848598   
16         False  True         4        0.802851        0.801817   
17         False  True         4        0.807402        0.805930   
18          True  True         2        0.847447        0.846897   
19          True  True         2        0.847630        0.846988   
20         False  True         2        0.879867        0.878848   
21         False  True         2        0.879648        0.878698   
22         False  True         4        0.805708        0.804340   
23         False  True         4        0.804753        0.803462   
24         False  True         2        0.901023        0.899970   
25         False  True         2        0.900562        0.899625   
26         False  True         4        0.809498        0.808140   
27         False  True         4        0.807624        0.806068   
28          True  True         2        0.877074        0.876700   
29          True  True         2        0.877083        0.876517   
...          ...   ...       ...             ...             ...   
2173       False  True         2        0.881281        0.850677   
2174       False  True         2        0.880760        0.850059   
2175       False  True         2        0.860798        0.843756   
2176       False  True         2        0.860869        0.843773   
2177        True  True         2        0.840781        0.805949   
2178        True  True         2        0.840884        0.806110   
2179        True  True         2        0.824895        0.809824   
2180        True  True         2        0.824951        0.809931   
2181       False  True         2        0.881498        0.850936   
2182       False  True         2        0.882108        0.851210   
2183       False  True         2        0.861339        0.844330   
2184       False  True         2        0.861618        0.844521   
2185        True  True         2        0.841010        0.806205   
2186        True  True         2        0.840983        0.805790   
2187        True  True         2        0.824666        0.809342   
2188        True  True         2        0.824900        0.809662   
2189       False  True         2        0.882199        0.851333   
2190       False  True         2        0.882069        0.851454   
2191       False  True         2        0.862265        0.845041   
2192       False  True         2        0.875390        0.844917   
2193        True  True         2        0.840470        0.805808   
2194        True  True         2        0.840613        0.805630   
2195        True  True         2        0.841646        0.809758   
2196        True  True         2        0.841475        0.809552   
2197       False  True         2        0.882566        0.851680   
2198       False  True         2        0.882607        0.851546   
2199       False  True         2        0.875575        0.844993   
2200       False  True         2        0.875777        0.844976   
2201        True  True         2        0.840522        0.805695   
2202        True  True         2        0.841503        0.810038   

      Test_char_acc  Test_word_acc  Test_Leven_avg  Train_Leven_avg  \
0          0.701747       0.000750        5.956950         5.995712   
1          0.701557       0.000950        5.958300         5.997044   
2          0.694913       0.000550        6.089100         6.050719   
3          0.694362       0.000550        6.099150         6.072012   
4          0.764868       0.007200        4.694850         4.663012   
5          0.763818       0.007150        4.715600         4.687056   
6          0.768895       0.007150        4.619850         4.650450   
7          0.768230       0.006500        4.629150         4.664187   
8          0.813835       0.024700        3.719150         3.682937   
9          0.814045       0.024500        3.715550         3.685225   
10         0.814685       0.024500        3.700400         3.734394   
11         0.814473       0.024850        3.704350         3.735806   
12         0.792952       0.014100        4.130750         4.121738   
13         0.801033       0.016850        3.972850         3.975375   
14         0.846850       0.054400        3.060100         3.025913   
15         0.847935       0.055650        3.038550         3.008200   
16         0.802702       0.018450        3.938000         3.935069   
17         0.806933       0.020900        3.854450         3.845844   
18         0.849120       0.054650        3.010050         3.039200   
19         0.849380       0.053200        3.006500         3.037288   
20         0.878408       0.113900        2.430850         2.401425   
21         0.878255       0.112650        2.433900         2.405931   
22         0.805593       0.020100        3.880050         3.878594   
23         0.804272       0.019250        3.907550         3.897744   
24         0.899540       0.175500        2.008150         1.978163   
25         0.899203       0.173900        2.014650         1.987344   
26         0.809435       0.023500        3.804350         3.802719   
27         0.807183       0.020000        3.847100         3.839081   
28         0.878865       0.110450        2.418050         2.447331   
29         0.879000       0.110500        2.420500         2.446737   
...             ...            ...             ...              ...   
2173       0.848145       0.018600        3.037100         2.374388   
2174       0.847746       0.019100        3.045075         2.384787   
2175       0.843383       0.032895        3.132307         2.784012   
2176       0.843485       0.033096        3.130279         2.782595   
2177       0.803365       0.003300        3.932675         3.184375   
2178       0.803596       0.002900        3.927900         3.182300   
2179       0.808991       0.011366        3.819727         3.501771   
2180       0.809015       0.010890        3.819226         3.500698   
2181       0.848243       0.018400        3.035150         2.370050   
2182       0.848641       0.019400        3.027175         2.357838   
2183       0.843739       0.033696        3.125222         2.773204   
2184       0.844159       0.034247        3.116811         2.767609   
2185       0.803851       0.003225        3.922775         3.179725   
2186       0.803550       0.003275        3.928900         3.180262   
2187       0.808893       0.010740        3.821755         3.506327   
2188       0.809060       0.010815        3.818325         3.501671   
2189       0.848595       0.019275        3.028075         2.356025   
2190       0.849228       0.019925        3.015450         2.358613   
2191       0.844566       0.034472        3.108649         2.754669   
2192       0.844505       0.033822        3.109901         2.492189   
2193       0.803306       0.002725        3.933725         3.190575   
2194       0.803435       0.003175        3.931200         3.187712   
2195       0.809256       0.011691        3.814570         3.166667   
2196       0.808778       0.010815        3.823783         3.170259   
2197       0.849081       0.019550        3.018375         2.348688   
2198       0.849150       0.019575        3.017000         2.347850   
2199       0.844608       0.033696        3.107798         2.488509   
2200       0.844874       0.034673        3.102466         2.484454   
2201       0.803397       0.002550        3.931950         3.189525   
2202       0.809045       0.011240        3.818701         3.169658   

      Valid_Leven_avg               Layers  
0            6.018550            [20, 960]  
1            6.020550            [20, 960]  
2            6.061100            [20, 960]  
3            6.070100            [20, 960]  
4            4.674750            [40, 960]  
5            4.696250            [40, 960]  
6            4.673700            [40, 960]  
7            4.687600            [40, 960]  
8            3.697450            [60, 960]  
9            3.695050            [60, 960]  
10           3.749500            [60, 960]  
11           3.749550            [60, 960]  
12           4.141350  [220, 20, 220, 960]  
13           4.005900  [220, 20, 220, 960]  
14           3.039850            [80, 960]  
15           3.026000            [80, 960]  
16           3.955300  [260, 20, 260, 960]  
17           3.875050  [260, 20, 260, 960]  
18           3.057450            [80, 960]  
19           3.058150            [80, 960]  
20           2.421850           [100, 960]  
21           2.425000           [100, 960]  
22           3.905600  [300, 20, 300, 960]  
23           3.921850  [300, 20, 300, 960]  
24           1.999150           [120, 960]  
25           2.005800           [120, 960]  
26           3.829400  [340, 20, 340, 960]  
27           3.870550  [340, 20, 340, 960]  
28           2.465600           [100, 960]  
29           2.465200           [100, 960]  
...               ...                  ...  
2173         2.986450           [540, 980]  
2174         2.998800           [540, 980]  
2175         3.124822           [540, 980]  
2176         3.124496           [540, 980]  
2177         3.880975           [540, 980]  
2178         3.877650           [540, 980]  
2179         3.803029           [540, 980]  
2180         3.800926           [540, 980]  
2181         2.981275           [560, 980]  
2182         2.975800           [560, 980]  
2183         3.113356           [560, 980]  
2184         3.109526           [560, 980]  
2185         3.875825           [560, 980]  
2186         3.884125           [560, 980]  
2187         3.812743           [560, 980]  
2188         3.806359           [560, 980]  
2189         2.973350           [580, 980]  
2190         2.970900           [580, 980]  
2191         3.099136           [580, 980]  
2192         3.101590           [580, 980]  
2193         3.883650           [580, 980]  
2194         3.887325           [580, 980]  
2195         3.804356           [580, 980]  
2196         3.808537           [580, 980]  
2197         2.966400           [600, 980]  
2198         2.969025           [600, 980]  
2199         3.099987           [600, 980]  
2200         3.100463           [600, 980]  
2201         3.886025           [600, 980]  
2202         3.798924           [600, 980]  

[2203 rows x 14 columns]
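
A possible follow-up (a minimal sketch, not part of the original run): reload the exported TSV and average the repeated runs to compare plain and variational autoencoders at each bottleneck size. The column names (Variational, Encoded_len, Test_char_acc) and the file path are taken from the cell above; the aggregation itself is only an illustration.

In [ ]:
import pandas as pd

# Reload the table written by the previous cell; the first column is the index.
df = pd.read_csv('/mnt/store/velkey/result_full.tsv', sep='\t', index_col=0)

# Mean test character accuracy per (Variational, Encoded_len) pair,
# pivoted so plain and variational models appear side by side.
summary = (df.groupby(['Variational', 'Encoded_len'])['Test_char_acc']
             .mean()
             .unstack('Variational'))
print(summary)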