In [1]:
import operator

import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from matplotlib import rcParams, cm
from mpl_toolkits.mplot3d import Axes3D
import tensorflow.contrib.keras as keras

from vixstructure.data import LongPricesDataset

In [2]:
rcParams["figure.figsize"] = 12, 6

In [3]:
dataset = LongPricesDataset("data/8_m_settle.csv", "data/expirations.csv")

In [4]:
x = dataset.term_structure.data_frame
# Negative second difference of the term structure at each inner leg (a curvature measure).
y = x.apply(lambda row: [np.nan] + [2*row.iloc[i] - row.iloc[i-1] - row.iloc[i+1] for i in range(1, 7)] + [np.nan],
            axis=1).iloc[:, 1:-1]

In [5]:
y.describe()


Out[5]:
M2 M3 M4 M5 M6 M7
count 2656.000000 2656.000000 2656.000000 2629.000000 2455.000000 2146.000000
mean 0.277579 0.178172 0.014480 0.054692 0.006312 0.129515
std 1.289689 0.740439 0.543991 0.497983 0.481377 0.456469
min -13.679996 -9.489994 -3.250000 -2.909998 -3.969997 -3.110001
25% -0.102501 -0.039999 -0.149998 -0.110001 -0.150002 -0.049999
50% 0.289999 0.229999 0.090000 0.100000 0.050001 0.149998
75% 0.800001 0.480001 0.260000 0.299997 0.200003 0.300001
max 3.949997 3.290001 2.099998 4.900000 2.080002 3.119995

In [6]:
def splitted_dataset(x, y, validation_split=0.15, test_split=0.15):
    """Split each chronological half of the data separately, so that the
    validation and test sets are drawn from two different periods."""
    assert len(x) == len(y)
    val_length = int(len(x) * validation_split / 2)
    test_length = int(len(x) * test_split / 2)
    # First and second half of the time series.
    x_fst = x.iloc[:int(len(x) / 2)]
    x_snd = x.iloc[int(len(x) / 2):]
    y_fst = y.iloc[:int(len(y) / 2)]
    y_snd = y.iloc[int(len(y) / 2):]
    # Training data: everything except the tail of each half.
    x_train, y_train = (x_fst.iloc[:-(val_length + test_length)].append(x_snd.iloc[:-(val_length + test_length)]),
                        y_fst.iloc[:-(val_length + test_length)].append(y_snd.iloc[:-(val_length + test_length)]))
    # Validation data: the slice between training and test data in each half.
    x_val, y_val = (x_fst.iloc[-(val_length + test_length):-test_length].append(x_snd.iloc[-(val_length + test_length):-test_length]),
                    y_fst.iloc[-(val_length + test_length):-test_length].append(y_snd.iloc[-(val_length + test_length):-test_length]))
    # Test data: the very end of each half.
    x_test, y_test = (x_fst.iloc[-test_length:].append(x_snd.iloc[-test_length:]),
                      y_fst.iloc[-test_length:].append(y_snd.iloc[-test_length:]))
    return (x_train, y_train), (x_val, y_val), (x_test, y_test)
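
A quick sanity check of the resulting proportions on toy data (hypothetical frames, only to illustrate the split sizes):

toy_x = pd.DataFrame(np.arange(200).reshape(100, 2))
toy_y = pd.DataFrame(np.arange(100))
(x_tr, _), (x_va, _), (x_te, _) = splitted_dataset(toy_x, toy_y)
print(len(x_tr), len(x_va), len(x_te))  # -> 72 14 14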

Classification

Up to five categories were considered; three are used here, thresholded at ±0.1:

  • + → y > 0.1
  • 0 → -0.1 ≤ y ≤ 0.1
  • - → y < -0.1

In [7]:
def get_categories(y):
    y = y.dropna()
    plus = (0.1 < y)
    zero = (-0.1 <= y) & (y <= 0.1)
    minus = (y < -0.1)
    return plus, zero, minus
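
A hypothetical toy frame, just to illustrate the thresholding:

toy = pd.DataFrame({"M2": [0.3, 0.05, -0.2]})
p, z, m = get_categories(toy)
# p.M2: [True, False, False], z.M2: [False, True, False], m.M2: [False, False, True]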

In [8]:
def get_count(plus, zero, minus):
    return pd.concat(map(operator.methodcaller("sum"), [plus, zero, minus]), axis=1, keys=["plus", "zero", "minus"])

In [42]:
plus, zero, minus = get_categories(y)
count = get_count(plus, zero, minus)
count.plot.bar(figsize=(8, 3))
plt.legend(("> 0.1", "≈ 0", "< 0.1"), framealpha=0.7, loc=9)
plt.title("Number of samples per category.")
plt.xticks(range(6), range(1, 7))
plt.xlabel("Leg")
plt.ylabel("Samples")
plt.savefig("classification-samples-per-category.pdf", format="pdf", dpi=300, bbox_inches="tight")
plt.show()



In [120]:
# The necessary scaling factor: each leg's "plus" count divided by the respective category's count.
scaling = count.apply(lambda x: [x[0] / xi for xi in x], axis=1)
scaling


Out[120]:
plus zero minus
M2 1.0 5.567797 2.204698
M3 1.0 4.259939 3.269953
M4 1.0 2.246888 1.864028
M5 1.0 2.354701 1.913194
M6 1.0 1.507438 1.449921
M7 1.0 2.995238 2.688034

That is not very balanced. A simpler alternative is to weight the zero and minus categories by a factor of two.
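
A minimal sketch of that simplified weighting (a hypothetical simple_scaling frame, same layout as scaling above):

simple_scaling = pd.DataFrame({"plus": 1.0, "zero": 2.0, "minus": 2.0}, index=scaling.index)
simple_scaling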


In [89]:
for (x_part, y_part), name in zip(splitted_dataset(x, y), ("Training Set", "Validation Set", "Test Set")):
    cats = get_categories(y_part)
    cnt = get_count(*cats)
    cnt.plot.bar(figsize=(12, 3))
    plt.title(name)
plt.show()


Even the split data sets look similar, so doubling the underrepresented categories should not be a problem.

Now for the mapping from the boolean category masks to one-hot targets:

$N \times M \to N \times M \times 3$


In [172]:
target = np.dstack((plus.values, zero.values, minus.values)).astype(float)
target.shape


Out[172]:
(2146, 6, 3)

In [173]:
scaled_target = target * scaling.values
scaled_target.shape


Out[173]:
(2146, 6, 3)

In [190]:
inputs = x.dropna().values
inputs = np.diff(inputs, axis=1)  # spreads between adjacent legs: 8 settle prices -> 7 differences
inputs.shape


Out[190]:
(2146, 7)

Trial training

Only for the first leg, predicting one day ahead.


In [262]:
leg_nr = 0
(x_train, y_train), (x_val, y_val), (x_test, y_test) = splitted_dataset(
    pd.DataFrame(inputs), pd.DataFrame(target[:,leg_nr,:]))

In [276]:
input_layer = keras.layers.Input(shape=(7,), name="inputs")
hidden_layer = keras.layers.Dense(30, activation="relu", name="hidden")(input_layer)
output_layer = keras.layers.Dense(3, activation="softmax", name="predictions")(hidden_layer)
model = keras.models.Model(inputs=input_layer, outputs=output_layer)
model.summary()
model.compile(optimizer="Adam",
              loss='categorical_crossentropy',
              metrics=['accuracy'])
# Shift by one day: features from day t predict the category on day t + 1.
model.fit(x_train.values[:-1], y_train.values[1:], epochs=100, batch_size=32,
          validation_data=(x_val.values[:-1], y_val.values[1:]),
          class_weight=dict(enumerate(scaling.iloc[leg_nr].values)))


_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
inputs (InputLayer)          (None, 7)                 0         
_________________________________________________________________
hidden (Dense)               (None, 30)                240       
_________________________________________________________________
predictions (Dense)          (None, 3)                 93        
=================================================================
Total params: 333
Trainable params: 333
Non-trainable params: 0
_________________________________________________________________
Train on 1505 samples, validate on 319 samples
Epoch 1/100
1505/1505 [==============================] - 0s - loss: 2.1602 - acc: 0.5362 - val_loss: 0.8369 - val_acc: 0.8401
Epoch 2/100
1505/1505 [==============================] - 0s - loss: 1.8694 - acc: 0.6877 - val_loss: 0.7239 - val_acc: 0.8589
Epoch 3/100
1505/1505 [==============================] - 0s - loss: 1.7584 - acc: 0.6565 - val_loss: 0.6944 - val_acc: 0.8339
Epoch 4/100
1505/1505 [==============================] - 0s - loss: 1.6892 - acc: 0.6698 - val_loss: 0.6156 - val_acc: 0.8464
Epoch 5/100
1505/1505 [==============================] - 0s - loss: 1.6310 - acc: 0.6944 - val_loss: 0.5927 - val_acc: 0.8401
Epoch 6/100
1505/1505 [==============================] - 0s - loss: 1.5881 - acc: 0.6558 - val_loss: 0.5632 - val_acc: 0.8339
Epoch 7/100
1505/1505 [==============================] - 0s - loss: 1.5470 - acc: 0.6777 - val_loss: 0.5347 - val_acc: 0.8433
Epoch 8/100
1505/1505 [==============================] - 0s - loss: 1.5170 - acc: 0.6957 - val_loss: 0.5174 - val_acc: 0.8464
Epoch 9/100
1505/1505 [==============================] - 0s - loss: 1.4929 - acc: 0.7017 - val_loss: 0.5104 - val_acc: 0.8464
Epoch 10/100
1505/1505 [==============================] - 0s - loss: 1.4708 - acc: 0.6844 - val_loss: 0.4714 - val_acc: 0.8621
Epoch 11/100
1505/1505 [==============================] - 0s - loss: 1.4529 - acc: 0.6983 - val_loss: 0.5106 - val_acc: 0.8401
Epoch 12/100
1505/1505 [==============================] - 0s - loss: 1.4357 - acc: 0.6658 - val_loss: 0.4529 - val_acc: 0.8621
Epoch 13/100
1505/1505 [==============================] - 0s - loss: 1.4216 - acc: 0.7110 - val_loss: 0.4477 - val_acc: 0.8777
Epoch 14/100
1505/1505 [==============================] - 0s - loss: 1.4078 - acc: 0.7043 - val_loss: 0.4382 - val_acc: 0.8621
Epoch 15/100
1505/1505 [==============================] - 0s - loss: 1.3987 - acc: 0.6930 - val_loss: 0.4108 - val_acc: 0.8746
Epoch 16/100
1505/1505 [==============================] - 0s - loss: 1.3920 - acc: 0.7083 - val_loss: 0.4164 - val_acc: 0.8777
Epoch 17/100
1505/1505 [==============================] - 0s - loss: 1.3817 - acc: 0.6944 - val_loss: 0.4589 - val_acc: 0.8589
Epoch 18/100
1505/1505 [==============================] - 0s - loss: 1.3703 - acc: 0.7043 - val_loss: 0.4088 - val_acc: 0.8809
Epoch 19/100
1505/1505 [==============================] - 0s - loss: 1.3638 - acc: 0.7103 - val_loss: 0.4378 - val_acc: 0.8683
Epoch 20/100
1505/1505 [==============================] - 0s - loss: 1.3631 - acc: 0.6804 - val_loss: 0.4218 - val_acc: 0.8746
Epoch 21/100
1505/1505 [==============================] - 0s - loss: 1.3572 - acc: 0.7156 - val_loss: 0.4082 - val_acc: 0.8903
Epoch 22/100
1505/1505 [==============================] - 0s - loss: 1.3564 - acc: 0.7017 - val_loss: 0.3996 - val_acc: 0.8903
Epoch 23/100
1505/1505 [==============================] - 0s - loss: 1.3535 - acc: 0.7183 - val_loss: 0.4258 - val_acc: 0.8715
Epoch 24/100
1505/1505 [==============================] - 0s - loss: 1.3490 - acc: 0.7282 - val_loss: 0.4134 - val_acc: 0.8809
Epoch 25/100
1505/1505 [==============================] - 0s - loss: 1.3414 - acc: 0.7116 - val_loss: 0.4162 - val_acc: 0.8746
Epoch 26/100
1505/1505 [==============================] - 0s - loss: 1.3357 - acc: 0.7130 - val_loss: 0.4026 - val_acc: 0.8809
Epoch 27/100
1505/1505 [==============================] - 0s - loss: 1.3346 - acc: 0.7322 - val_loss: 0.4016 - val_acc: 0.8871
Epoch 28/100
1505/1505 [==============================] - 0s - loss: 1.3303 - acc: 0.7296 - val_loss: 0.4098 - val_acc: 0.8777
Epoch 29/100
1505/1505 [==============================] - 0s - loss: 1.3282 - acc: 0.7189 - val_loss: 0.4118 - val_acc: 0.8715
Epoch 30/100
1505/1505 [==============================] - 0s - loss: 1.3281 - acc: 0.7176 - val_loss: 0.4033 - val_acc: 0.8809
Epoch 31/100
1505/1505 [==============================] - 0s - loss: 1.3233 - acc: 0.7196 - val_loss: 0.4056 - val_acc: 0.8746
Epoch 32/100
1505/1505 [==============================] - 0s - loss: 1.3220 - acc: 0.7236 - val_loss: 0.4024 - val_acc: 0.8777
Epoch 33/100
1505/1505 [==============================] - 0s - loss: 1.3188 - acc: 0.7196 - val_loss: 0.3990 - val_acc: 0.8746
Epoch 34/100
1505/1505 [==============================] - 0s - loss: 1.3163 - acc: 0.7130 - val_loss: 0.3880 - val_acc: 0.8777
Epoch 35/100
1505/1505 [==============================] - 0s - loss: 1.3148 - acc: 0.7229 - val_loss: 0.3883 - val_acc: 0.8777
Epoch 36/100
1505/1505 [==============================] - 0s - loss: 1.3157 - acc: 0.7402 - val_loss: 0.3877 - val_acc: 0.8809
Epoch 37/100
1505/1505 [==============================] - 0s - loss: 1.3118 - acc: 0.7189 - val_loss: 0.4028 - val_acc: 0.8777
Epoch 38/100
1505/1505 [==============================] - 0s - loss: 1.3161 - acc: 0.7282 - val_loss: 0.3881 - val_acc: 0.8715
Epoch 39/100
1505/1505 [==============================] - 0s - loss: 1.3077 - acc: 0.7103 - val_loss: 0.3956 - val_acc: 0.8746
Epoch 40/100
1505/1505 [==============================] - 0s - loss: 1.3061 - acc: 0.7296 - val_loss: 0.3893 - val_acc: 0.8746
Epoch 41/100
1505/1505 [==============================] - 0s - loss: 1.3037 - acc: 0.7143 - val_loss: 0.3966 - val_acc: 0.8715
Epoch 42/100
1505/1505 [==============================] - 0s - loss: 1.3026 - acc: 0.7302 - val_loss: 0.3897 - val_acc: 0.8746
Epoch 43/100
1505/1505 [==============================] - 0s - loss: 1.3016 - acc: 0.7302 - val_loss: 0.3788 - val_acc: 0.8840
Epoch 44/100
1505/1505 [==============================] - 0s - loss: 1.3083 - acc: 0.7329 - val_loss: 0.3944 - val_acc: 0.8746
Epoch 45/100
1505/1505 [==============================] - 0s - loss: 1.3067 - acc: 0.7150 - val_loss: 0.3907 - val_acc: 0.8777
Epoch 46/100
1505/1505 [==============================] - 0s - loss: 1.3014 - acc: 0.7110 - val_loss: 0.3966 - val_acc: 0.8746
Epoch 47/100
1505/1505 [==============================] - 0s - loss: 1.2972 - acc: 0.7189 - val_loss: 0.3964 - val_acc: 0.8683
Epoch 48/100
1505/1505 [==============================] - 0s - loss: 1.2967 - acc: 0.7349 - val_loss: 0.3980 - val_acc: 0.8715
Epoch 49/100
1505/1505 [==============================] - 0s - loss: 1.3129 - acc: 0.7256 - val_loss: 0.3762 - val_acc: 0.8840
Epoch 50/100
1505/1505 [==============================] - 0s - loss: 1.3136 - acc: 0.6924 - val_loss: 0.3988 - val_acc: 0.8746
Epoch 51/100
1505/1505 [==============================] - 0s - loss: 1.2938 - acc: 0.7196 - val_loss: 0.3919 - val_acc: 0.8715
Epoch 52/100
1505/1505 [==============================] - 0s - loss: 1.2918 - acc: 0.7183 - val_loss: 0.3854 - val_acc: 0.8715
Epoch 53/100
1505/1505 [==============================] - 0s - loss: 1.2892 - acc: 0.7389 - val_loss: 0.3855 - val_acc: 0.8715
Epoch 54/100
1505/1505 [==============================] - 0s - loss: 1.2885 - acc: 0.7276 - val_loss: 0.3758 - val_acc: 0.8840
Epoch 55/100
1505/1505 [==============================] - 0s - loss: 1.2878 - acc: 0.7455 - val_loss: 0.3961 - val_acc: 0.8715
Epoch 56/100
1505/1505 [==============================] - 0s - loss: 1.2872 - acc: 0.7329 - val_loss: 0.3923 - val_acc: 0.8715
Epoch 57/100
1505/1505 [==============================] - 0s - loss: 1.2843 - acc: 0.7342 - val_loss: 0.3803 - val_acc: 0.8746
Epoch 58/100
1505/1505 [==============================] - 0s - loss: 1.2850 - acc: 0.7276 - val_loss: 0.3936 - val_acc: 0.8715
Epoch 59/100
1505/1505 [==============================] - 0s - loss: 1.2831 - acc: 0.7262 - val_loss: 0.3818 - val_acc: 0.8715
Epoch 60/100
1505/1505 [==============================] - 0s - loss: 1.2839 - acc: 0.7528 - val_loss: 0.3946 - val_acc: 0.8683
Epoch 61/100
1505/1505 [==============================] - 0s - loss: 1.2842 - acc: 0.7422 - val_loss: 0.3907 - val_acc: 0.8715
Epoch 62/100
1505/1505 [==============================] - 0s - loss: 1.2800 - acc: 0.7389 - val_loss: 0.3839 - val_acc: 0.8746
Epoch 63/100
1505/1505 [==============================] - 0s - loss: 1.2803 - acc: 0.7189 - val_loss: 0.3906 - val_acc: 0.8683
Epoch 64/100
1505/1505 [==============================] - 0s - loss: 1.2794 - acc: 0.7389 - val_loss: 0.3906 - val_acc: 0.8683
Epoch 65/100
1505/1505 [==============================] - 0s - loss: 1.2792 - acc: 0.7349 - val_loss: 0.3749 - val_acc: 0.8715
Epoch 66/100
1505/1505 [==============================] - 0s - loss: 1.2820 - acc: 0.7475 - val_loss: 0.3796 - val_acc: 0.8746
Epoch 67/100
1505/1505 [==============================] - 0s - loss: 1.2751 - acc: 0.7289 - val_loss: 0.3922 - val_acc: 0.8715
Epoch 68/100
1505/1505 [==============================] - 0s - loss: 1.2752 - acc: 0.7422 - val_loss: 0.3868 - val_acc: 0.8683
Epoch 69/100
1505/1505 [==============================] - 0s - loss: 1.2730 - acc: 0.7329 - val_loss: 0.3910 - val_acc: 0.8652
Epoch 70/100
1505/1505 [==============================] - 0s - loss: 1.2765 - acc: 0.7389 - val_loss: 0.3726 - val_acc: 0.8715
Epoch 71/100
1505/1505 [==============================] - 0s - loss: 1.2743 - acc: 0.7468 - val_loss: 0.3820 - val_acc: 0.8715
Epoch 72/100
1505/1505 [==============================] - 0s - loss: 1.2747 - acc: 0.7349 - val_loss: 0.3863 - val_acc: 0.8746
Epoch 73/100
1505/1505 [==============================] - 0s - loss: 1.2770 - acc: 0.7375 - val_loss: 0.3869 - val_acc: 0.8683
Epoch 74/100
1505/1505 [==============================] - 0s - loss: 1.2702 - acc: 0.7389 - val_loss: 0.3782 - val_acc: 0.8683
Epoch 75/100
1505/1505 [==============================] - 0s - loss: 1.2697 - acc: 0.7449 - val_loss: 0.3812 - val_acc: 0.8715
Epoch 76/100
1505/1505 [==============================] - 0s - loss: 1.2702 - acc: 0.7309 - val_loss: 0.3891 - val_acc: 0.8652
Epoch 77/100
1505/1505 [==============================] - 0s - loss: 1.2730 - acc: 0.7442 - val_loss: 0.3759 - val_acc: 0.8715
Epoch 78/100
1505/1505 [==============================] - 0s - loss: 1.2662 - acc: 0.7375 - val_loss: 0.3824 - val_acc: 0.8715
Epoch 79/100
1505/1505 [==============================] - 0s - loss: 1.2652 - acc: 0.7409 - val_loss: 0.3713 - val_acc: 0.8683
Epoch 80/100
1505/1505 [==============================] - 0s - loss: 1.2663 - acc: 0.7495 - val_loss: 0.3780 - val_acc: 0.8715
Epoch 81/100
1505/1505 [==============================] - 0s - loss: 1.2663 - acc: 0.7369 - val_loss: 0.3772 - val_acc: 0.8683
Epoch 82/100
1505/1505 [==============================] - 0s - loss: 1.2697 - acc: 0.7475 - val_loss: 0.3788 - val_acc: 0.8715
Epoch 83/100
1505/1505 [==============================] - 0s - loss: 1.2647 - acc: 0.7429 - val_loss: 0.3875 - val_acc: 0.8683
Epoch 84/100
1505/1505 [==============================] - 0s - loss: 1.2752 - acc: 0.7050 - val_loss: 0.3902 - val_acc: 0.8683
Epoch 85/100
1505/1505 [==============================] - 0s - loss: 1.2624 - acc: 0.7389 - val_loss: 0.3811 - val_acc: 0.8715
Epoch 86/100
1505/1505 [==============================] - 0s - loss: 1.2618 - acc: 0.7462 - val_loss: 0.3729 - val_acc: 0.8683
Epoch 87/100
1505/1505 [==============================] - 0s - loss: 1.2630 - acc: 0.7442 - val_loss: 0.3719 - val_acc: 0.8715
Epoch 88/100
1505/1505 [==============================] - 0s - loss: 1.2625 - acc: 0.7449 - val_loss: 0.3880 - val_acc: 0.8652
Epoch 89/100
1505/1505 [==============================] - 0s - loss: 1.2593 - acc: 0.7262 - val_loss: 0.3686 - val_acc: 0.8621
Epoch 90/100
1505/1505 [==============================] - 0s - loss: 1.2633 - acc: 0.7468 - val_loss: 0.3706 - val_acc: 0.8621
Epoch 91/100
1505/1505 [==============================] - 0s - loss: 1.2595 - acc: 0.7389 - val_loss: 0.3703 - val_acc: 0.8589
Epoch 92/100
1505/1505 [==============================] - 0s - loss: 1.2668 - acc: 0.7455 - val_loss: 0.3725 - val_acc: 0.8683
Epoch 93/100
1505/1505 [==============================] - 0s - loss: 1.2558 - acc: 0.7336 - val_loss: 0.3732 - val_acc: 0.8683
Epoch 94/100
1505/1505 [==============================] - 0s - loss: 1.2593 - acc: 0.7528 - val_loss: 0.3813 - val_acc: 0.8683
Epoch 95/100
1505/1505 [==============================] - 0s - loss: 1.2565 - acc: 0.7429 - val_loss: 0.3856 - val_acc: 0.8621
Epoch 96/100
1505/1505 [==============================] - 0s - loss: 1.2542 - acc: 0.7369 - val_loss: 0.3670 - val_acc: 0.8621
Epoch 97/100
1505/1505 [==============================] - 0s - loss: 1.2548 - acc: 0.7488 - val_loss: 0.3835 - val_acc: 0.8715
Epoch 98/100
1505/1505 [==============================] - 0s - loss: 1.2527 - acc: 0.7409 - val_loss: 0.3807 - val_acc: 0.8683
Epoch 99/100
1505/1505 [==============================] - 0s - loss: 1.2527 - acc: 0.7468 - val_loss: 0.3749 - val_acc: 0.8621
Epoch 100/100
1505/1505 [==============================] - 0s - loss: 1.2524 - acc: 0.7495 - val_loss: 0.3784 - val_acc: 0.8589
Out[276]:
<tensorflow.contrib.keras.python.keras.callbacks.History at 0x7f436503d048>

In [283]:
test_pred = model.predict(x_test.values[:-1])

In [278]:
def accuracy(y1, y2):
    # Fraction of samples whose predicted class (argmax) matches the true class.
    return np.equal(np.argmax(y1, axis=-1), np.argmax(y2, axis=-1)).sum() / len(y1)
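
A toy check with made-up values; the argmax classes agree on two of three samples:

a = np.array([[0.9, 0.1, 0.0], [0.2, 0.7, 0.1], [0.1, 0.2, 0.7]])
b = np.array([[1, 0, 0], [0, 0, 1], [0, 0, 1]])
accuracy(a, b)  # -> 0.6666...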

In [284]:
# Network accuracy on the test set
accuracy(test_pred, y_test.values[1:])


Out[284]:
0.70846394984326022

In [285]:
# Naive baseline: assume today's category persists to tomorrow
accuracy(y_test.values[:-1], y_test.values[1:])


Out[285]:
0.80250783699059558

Conclusion:

Classification performs just as poorly: the naive persistence baseline (0.80) beats the network (0.71).


In [328]:
results_dict = {}

In [332]:
# Repeat for all legs, with prediction horizons from 1 to 22 days in steps of 3:
for days in range(1, 23, 3):
    network_predictions = []
    naive_predictions = []
    for leg_nr in range(6):
        (x_train, y_train), (x_val, y_val), (x_test, y_test) = splitted_dataset(
            pd.DataFrame(inputs), pd.DataFrame(target[:,leg_nr,:]))
        input_layer = keras.layers.Input(shape=(7,), name="inputs")
        hidden_layer = keras.layers.Dense(30, activation="relu", name="hidden")(input_layer)
        output_layer = keras.layers.Dense(3, activation="softmax", name="predictions")(hidden_layer)
        model = keras.models.Model(inputs=input_layer, outputs=output_layer)
        model.compile(optimizer="Adam",
                      loss='categorical_crossentropy',
                      metrics=['accuracy'])
        # Same shift as before, generalized to a horizon of `days`.
        model.fit(x_train.values[:-days], y_train.values[days:], epochs=100, batch_size=32,
                  validation_data=(x_val.values[:-days], y_val.values[days:]),
                  class_weight=dict(enumerate(scaling.iloc[leg_nr].values)),
                  verbose=0)
        test_pred = model.predict(x_test.values[:-days])
        pred_acc = accuracy(test_pred, y_test.values[days:])
        naive_acc = accuracy(y_test.values[:-days], y_test.values[days:])
        network_predictions.append(pred_acc)
        naive_predictions.append(naive_acc)
    results = pd.DataFrame([pd.Series(naive_predictions), pd.Series(network_predictions)])
    results.columns = ["V" + str(i) for i in range(1, 7)]
    results.index = ["Naive", "Network"]
    results_dict[days] = results

In [396]:
results_dict[1].T.plot.bar(figsize=(4, 4), width=0.8)
plt.legend(loc="lower center")
plt.axhline(results_dict[1].loc["Naive"].mean(), color="#1f77b4")
plt.axhline(results_dict[1].loc["Network"].mean(), color="#ff7f0e")
plt.ylabel("Accuracy")
plt.xticks(np.arange(6), list(range(1, 7)))
plt.savefig("classification-1.pdf", format="pdf", dpi=300, bbox_inches="tight")
plt.show()



In [400]:
plt.figure(figsize=(4,4))
plt.plot(list(results_dict.keys()),
         [results_dict[i].loc["Naive"].mean() for i in results_dict],
         list(results_dict.keys()),
         [results_dict[i].loc["Network"].mean() for i in results_dict],
         linewidth=2)
plt.legend(("Naive", "Network"))
plt.ylabel("Mean accuracy")
plt.axhline(0.5, color="grey", alpha=0.75)
plt.xlim(1, 22)
plt.grid(axis="x")
plt.xticks(list(results_dict.keys()))
plt.savefig("classification-all.pdf", format="pdf", dpi=300, bbox_inches="tight")
plt.show()