In [1]:
import numpy as np
import pandas as pd
import datetime as dt
import matplotlib.pyplot as plt
from keras.datasets import imdb
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import LSTM
from keras.layers import Dropout
from keras.layers.embeddings import Embedding
from keras.preprocessing import sequence
from sklearn.metrics import confusion_matrix, roc_curve, auc
# fix random seed for reproducibility
np.random.seed(123)
# IPython magic for displaying plots inline
%matplotlib inline
Using TensorFlow backend.
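These import paths come from the standalone Keras of the 1.x/2.0 era (hence the "Using TensorFlow backend." message). On a current TensorFlow install the same notebook would, roughly, import from tf.keras instead; a minimal sketch keeping only what this notebook actually uses:

import numpy as np
import matplotlib.pyplot as plt
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, LSTM, Dropout
from sklearn.metrics import confusion_matrix, roc_curve, auc

np.random.seed(123)   # seed NumPy's global RNG for reproducibility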
In [2]:
project_dir = r'/Users/hudson/Code/marketModel/'
In [3]:
# load the dataset
trainingData = np.load(project_dir + 'data/trainingData.npz')
X = trainingData['X']
Y = trainingData['Y']
print(X.shape)
print(Y.shape)
(9958, 360, 130)
(9958, 26)
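So X holds 9958 samples of 360 time steps with 130 features each, and Y holds 26 target columns per sample. A quick sanity check on the single target used below (column 0 of Y, assumed binary) might look like:

print(X.dtype, Y.dtype)       # confirm numeric dtypes
print(np.isnan(X).any())      # check the features for missing values
print(np.unique(Y[:, 0]))     # expect two classes for a binary_crossentropy target
print(Y[:, 0].mean())         # fraction of positive labels (the base rate)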
In [4]:
batch_size = 49
num_samples = 100*batch_size     # 4900 samples each for training and testing
num_features = 1                 # keep only the first of the 130 feature columns
X_train = X[0:num_samples, :, 0:num_features]
X_test = X[num_samples:2*num_samples, :, 0:num_features]
y_train = Y[0:num_samples, 0]
y_test = Y[num_samples:2*num_samples,0]
In [5]:
print('Shape of X_train: ' + str(X_train.shape))
print('Shape of y_train: ' + str(y_train.shape))
print('Shape of X_test: ' + str(X_test.shape))
print('Shape of y_test: ' + str(y_test.shape))
Shape of X_train: (4900, 360, 1)
Shape of y_train: (4900,)
Shape of X_test: (4900, 360, 1)
Shape of y_test: (4900,)
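The first num_samples rows feed training and the next num_samples rows are held out for testing, with the single target taken from column 0 of Y. Because the stateful model below fixes the batch size in batch_input_shape, both splits must be exact multiples of batch_size (4900 = 100 x 49 here); a small guard worth adding, as a suggestion rather than part of the original:

# both splits must divide evenly into batches for the stateful LSTM below
assert X_train.shape[0] % batch_size == 0
assert X_test.shape[0] % batch_size == 0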
In [6]:
# create the model
model = Sequential()
model.add(LSTM(128, batch_input_shape = (batch_size, 360, num_features), stateful=True))
model.add(Dropout(0.2))
model.add(Dense(1, activation='sigmoid'))
model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
print(model.summary())
model.fit(X_train, y_train, epochs=150, batch_size=batch_size, shuffle=False)  # shuffle=False keeps batches in order for the stateful LSTM
_________________________________________________________________
Layer (type)                 Output Shape              Param #
=================================================================
lstm_1 (LSTM)                (49, 128)                 66560
_________________________________________________________________
dropout_1 (Dropout)          (49, 128)                 0
_________________________________________________________________
dense_1 (Dense)              (49, 1)                   129
=================================================================
Total params: 66,689
Trainable params: 66,689
Non-trainable params: 0
_________________________________________________________________
None
Epoch 1/150
4900/4900 [==============================] - 81s - loss: 0.5364 - acc: 0.8549
Epoch 2/150
4900/4900 [==============================] - 80s - loss: 0.3716 - acc: 0.8949
Epoch 3/150
4900/4900 [==============================] - 80s - loss: 0.3663 - acc: 0.8949
...
(epochs 4-149: loss drifts from 0.3631 down to roughly 0.343, with a brief spike to 0.3822 around epoch 121, while acc holds at 0.8949 throughout)
...
Epoch 150/150
4900/4900 [==============================] - 86s - loss: 0.3432 - acc: 0.8949
Out[6]:
<keras.callbacks.History at 0x2098d80d0>
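Training plateaus almost immediately: the loss settles near 0.34 within the first ten epochs and the accuracy never moves off 0.8949. One thing to be aware of with stateful=True is that the LSTM's hidden state carries over from batch to batch and is not reset between epochs by model.fit; if each epoch is meant to start from a clean state, a common pattern (a sketch under that assumption, not what was run above) is a manual epoch loop:

num_epochs = 150
for epoch in range(num_epochs):
    print('Epoch %d/%d' % (epoch + 1, num_epochs))
    model.fit(X_train, y_train, epochs=1, batch_size=batch_size,
              shuffle=False, verbose=1)
    model.reset_states()   # clear the carried-over LSTM state before the next epoch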
In [7]:
y_pred = model.predict(X_test, batch_size)
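Because batch_input_shape pins the batch size at 49, predictions must use the same batch size; it may also make sense to reset the state left over from training before scoring the test set. A hedged variant of this cell:

model.reset_states()                                    # drop state carried over from training
y_pred = model.predict(X_test, batch_size=batch_size)   # predicted probabilities, shape (4900, 1)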
In [8]:
# confusion matrix as fractions of the test set, plus ROC AUC
print(confusion_matrix(y_test, y_pred > 0.5) / float(num_samples))
fpr, tpr, _ = roc_curve(y_test, y_pred)
roc_auc = auc(fpr, tpr)
print(roc_auc)
[[ 0.95081633 0. ]
[ 0.04918367 0. ]]
0.567354578075
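The confusion matrix says every test sample is predicted negative: about 95% of the test set belongs to class 0 and the second column is all zeros, so the 0.8949 training accuracy is consistent with a model that has simply learned the base rate, and the AUC of about 0.57 is barely better than chance. One standard mitigation for this kind of imbalance, sketched here as a suggestion rather than something run in this notebook, is to reweight the positive class during training:

pos_frac = y_train.mean()                                 # fraction of positive training labels
class_weight = {0: 1.0, 1: (1.0 - pos_frac) / pos_frac}   # upweight the rare positive class
model.fit(X_train, y_train, epochs=150, batch_size=batch_size,
          shuffle=False, class_weight=class_weight)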
In [9]:
plt.figure()
lw = 2
plt.plot(fpr, tpr, color='darkorange',
lw=lw, label='ROC curve (area = %0.2f)' % roc_auc)
plt.plot([0, 1], [0, 1], color='navy', lw=lw, linestyle='--')
plt.xlim([0.0, 1.0])
plt.ylim([0.0, 1.05])
plt.xlabel('False Positive Rate')
plt.ylabel('True Positive Rate')
plt.title('Receiver operating characteristic')
plt.legend(loc="lower right")
plt.show()
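With roughly a 95/5 class split, a precision-recall curve is usually more informative than ROC. A short follow-up cell along these lines (not part of the original notebook) could be:

from sklearn.metrics import precision_recall_curve, average_precision_score

precision, recall, _ = precision_recall_curve(y_test, y_pred.ravel())
ap = average_precision_score(y_test, y_pred.ravel())
plt.figure()
plt.plot(recall, precision, color='darkorange', lw=2,
         label='PR curve (AP = %0.2f)' % ap)
plt.xlabel('Recall')
plt.ylabel('Precision')
plt.title('Precision-recall on the held-out set')
plt.legend(loc='upper right')
plt.show()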
In [ ]: