In [3]:
#import statements
from keras.layers.core import Dense, Activation, Dropout
from keras.layers.recurrent import LSTM
from keras.models import Sequential
import lstm, time
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.preprocessing import StandardScaler
import numpy as np
#read in the S&P 500 data from csv
spx_data = pd.read_csv('data/spx_data.csv')
We have six features: the opening price, the daily high, the daily low, the adjusted closing price, the volume, and the actual closing price.
We will first preprocess the data and then build an LSTM network to model the fluctuations of the S&P 500 as a time series.
Here is what the data looks like:
In [4]:
spx_data.head()
Out[4]:
          Open         High          Low        Close    Adj Close      Volume
0  2251.570068  2263.879883  2245.129883  2257.830078  2257.830078  3770530000
1  2261.600098  2272.820068  2261.600098  2270.750000  2270.750000  3764890000
2  2268.179932  2271.500000  2260.449951  2269.000000  2269.000000  3761820000
3  2271.139893  2282.100098  2264.060059  2276.979980  2276.979980  3339890000
4  2273.590088  2275.489990  2268.899902  2268.899902  2268.899902  3217610000
Now we can proceed with the analysis.
In [5]:
# reorder the DataFrame so that the closing price is the last column
spx_data = spx_data[['Open', 'High', 'Low', 'Adj Close', 'Volume', 'Close']]
spx_data_close = spx_data[['Close']]
spx_data
Out[5]:
            Open         High          Low    Adj Close      Volume        Close
0    2251.570068  2263.879883  2245.129883  2257.830078  3770530000  2257.830078
1    2261.600098  2272.820068  2261.600098  2270.750000  3764890000  2270.750000
2    2268.179932  2271.500000  2260.449951  2269.000000  3761820000  2269.000000
3    2271.139893  2282.100098  2264.060059  2276.979980  3339890000  2276.979980
4    2273.590088  2275.489990  2268.899902  2268.899902  3217610000  2268.899902
...          ...          ...          ...          ...         ...          ...
120  2443.320068  2450.419922  2437.030029  2439.070068  3238970000  2439.070068
121  2436.340088  2440.149902  2419.379883  2419.379883  3563910000  2419.379883
122  2428.699951  2442.969971  2428.020020  2440.689941  3500800000  2440.689941
123  2442.379883  2442.729980  2405.699951  2419.699951  3900280000  2419.699951
124  2431.389893  2439.169922  2428.689941  2429.010010  1962290000  2429.010010

125 rows × 6 columns
In [6]:
#plot the closing prices over time
plt.plot(spx_data_close)
plt.show()
In [7]:
#standardize the data (zero mean, unit variance for each column)
normalized_spx_data = StandardScaler().fit_transform(spx_data)
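StandardScaler standardizes each of the six columns independently to zero mean and unit variance, i.e. z = (x - mean) / std computed per column. A minimal hand-rolled sketch of the same transform, assuming spx_data as loaded above, would be:

# per-column standardization by hand (sketch; matches StandardScaler's default behaviour)
values = spx_data.values.astype(float)
normalized_by_hand = (values - values.mean(axis=0)) / values.std(axis=0)

Note that fit_transform returns a plain NumPy array, so the column order fixed above (with Close last) is exactly what prep_data below relies on.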
In [8]:
## N.B. the function below is a modified form of a helper function written by Jakob Aungier
## (full citation in lstm.py)
def prep_data(data, seq_len):
    """
    Returns the training and test inputs and targets.
    data -> the full corpus of data
    seq_len -> the length of each input sequence
    """
    sequence_length = seq_len + 1
    result = []
    # slide a window of seq_len + 1 days across the data; the last day of each
    # window supplies the target closing price
    for index in range(len(data) - sequence_length):
        result.append(data[index: index + sequence_length])
    result = np.array(result)
    # 90% train, 10% test partition
    partition = int(round(0.9 * result.shape[0]))
    train = result[:partition, :]
    np.random.shuffle(train)
    x_train = train[:, :-1]
    y_train = train[:, -1, -1]
    x_test = result[partition:, :-1]
    y_test = result[partition:, -1, -1]
    # reshape to (samples, timesteps, 6 features)
    x_train = np.reshape(x_train, (x_train.shape[0], x_train.shape[1], 6))
    x_test = np.reshape(x_test, (x_test.shape[0], x_test.shape[1], 6))
    return [x_train, y_train, x_test, y_test]
In [9]:
X_train, y_train, X_test, y_test = prep_data(normalized_spx_data, 20)
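As a quick sanity check (not part of the original notebook), we can print the resulting shapes: with 125 rows and seq_len = 20 the sliding window yields 104 samples, of which roughly 90% go to training.

# hypothetical shape check
print(X_train.shape, y_train.shape)   # expect roughly (94, 20, 6) and (94,)
print(X_test.shape, y_test.shape)     # expect roughly (10, 20, 6) and (10,)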
In [10]:
#building the model: two stacked LSTM layers with dropout, followed by a
#single linear output unit predicting the next (normalized) closing price
model = Sequential()
model.add(LSTM(input_dim=6, output_dim=50, return_sequences=True))
model.add(Dropout(0.2))
model.add(LSTM(100, return_sequences=False))
model.add(Dropout(0.2))
model.add(Dense(output_dim=1))
model.add(Activation('linear'))

start = time.time()
model.compile(loss='mse', optimizer='rmsprop')
print('compilation time:', time.time() - start)
compilation time: 0.033596038818359375
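The cell above uses the Keras 1 argument names (input_dim, output_dim, and later nb_epoch). On Keras 2 or later, a minimal equivalent sketch, assuming the same 20-step windows of 6 features, would look like:

# Keras 2+ equivalent of the model above (sketch, not executed in this notebook)
model = Sequential()
model.add(LSTM(50, input_shape=(20, 6), return_sequences=True))
model.add(Dropout(0.2))
model.add(LSTM(100, return_sequences=False))
model.add(Dropout(0.2))
model.add(Dense(1, activation='linear'))
model.compile(loss='mse', optimizer='rmsprop')
# and pass epochs=200 instead of nb_epoch=200 to model.fit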
In [11]:
#Train the model
model.fit(
X_train,
y_train,
batch_size=512,
nb_epoch=200,
validation_split=0.05)
Train on 89 samples, validate on 5 samples
Epoch 1/200
89/89 [==============================] - 1s - loss: 0.5014 - val_loss: 0.2043
Epoch 2/200
89/89 [==============================] - 0s - loss: 0.2091 - val_loss: 0.1904
Epoch 3/200
89/89 [==============================] - 0s - loss: 0.1541 - val_loss: 0.1802
Epoch 4/200
89/89 [==============================] - 0s - loss: 0.1293 - val_loss: 0.1569
Epoch 5/200
89/89 [==============================] - 0s - loss: 0.1235 - val_loss: 0.1515
Epoch 6/200
89/89 [==============================] - 0s - loss: 0.1204 - val_loss: 0.1435
Epoch 7/200
89/89 [==============================] - 0s - loss: 0.1018 - val_loss: 0.1466
Epoch 8/200
89/89 [==============================] - 0s - loss: 0.1100 - val_loss: 0.1070
Epoch 9/200
89/89 [==============================] - 0s - loss: 0.1028 - val_loss: 0.1762
Epoch 10/200
89/89 [==============================] - 0s - loss: 0.1309 - val_loss: 0.0904
Epoch 11/200
89/89 [==============================] - 0s - loss: 0.1374 - val_loss: 0.1427
Epoch 12/200
89/89 [==============================] - 0s - loss: 0.1034 - val_loss: 0.1126
Epoch 13/200
89/89 [==============================] - 0s - loss: 0.0925 - val_loss: 0.1172
Epoch 14/200
89/89 [==============================] - 0s - loss: 0.0875 - val_loss: 0.1132
Epoch 15/200
89/89 [==============================] - 0s - loss: 0.0860 - val_loss: 0.1165
Epoch 16/200
89/89 [==============================] - 0s - loss: 0.0824 - val_loss: 0.1018
Epoch 17/200
89/89 [==============================] - 0s - loss: 0.0944 - val_loss: 0.1086
Epoch 18/200
89/89 [==============================] - 0s - loss: 0.0897 - val_loss: 0.0771
Epoch 19/200
89/89 [==============================] - 0s - loss: 0.0924 - val_loss: 0.1464
Epoch 20/200
89/89 [==============================] - 0s - loss: 0.1018 - val_loss: 0.0745
Epoch 21/200
89/89 [==============================] - 0s - loss: 0.1106 - val_loss: 0.1236
Epoch 22/200
89/89 [==============================] - 0s - loss: 0.0853 - val_loss: 0.0866
Epoch 23/200
89/89 [==============================] - 0s - loss: 0.0885 - val_loss: 0.1082
Epoch 24/200
89/89 [==============================] - 0s - loss: 0.0748 - val_loss: 0.0816
Epoch 25/200
89/89 [==============================] - 0s - loss: 0.0802 - val_loss: 0.1179
Epoch 26/200
89/89 [==============================] - 0s - loss: 0.0810 - val_loss: 0.0711
Epoch 27/200
89/89 [==============================] - 0s - loss: 0.0994 - val_loss: 0.1068
Epoch 28/200
89/89 [==============================] - 0s - loss: 0.0749 - val_loss: 0.0923
Epoch 29/200
89/89 [==============================] - 0s - loss: 0.0684 - val_loss: 0.0889
Epoch 30/200
89/89 [==============================] - 0s - loss: 0.0699 - val_loss: 0.0780
Epoch 31/200
89/89 [==============================] - 0s - loss: 0.0717 - val_loss: 0.0791
Epoch 32/200
89/89 [==============================] - 0s - loss: 0.0593 - val_loss: 0.0645
Epoch 33/200
89/89 [==============================] - 0s - loss: 0.0566 - val_loss: 0.0800
Epoch 34/200
89/89 [==============================] - 0s - loss: 0.0661 - val_loss: 0.0626
Epoch 35/200
89/89 [==============================] - 0s - loss: 0.0750 - val_loss: 0.0957
Epoch 36/200
89/89 [==============================] - 0s - loss: 0.1030 - val_loss: 0.0617
Epoch 37/200
89/89 [==============================] - 0s - loss: 0.1200 - val_loss: 0.0814
Epoch 38/200
89/89 [==============================] - 0s - loss: 0.0557 - val_loss: 0.0706
Epoch 39/200
89/89 [==============================] - 0s - loss: 0.0567 - val_loss: 0.0722
Epoch 40/200
89/89 [==============================] - 0s - loss: 0.0566 - val_loss: 0.0635
Epoch 41/200
89/89 [==============================] - 0s - loss: 0.0646 - val_loss: 0.0765
Epoch 42/200
89/89 [==============================] - 0s - loss: 0.0570 - val_loss: 0.0469
Epoch 43/200
89/89 [==============================] - 0s - loss: 0.0576 - val_loss: 0.0698
Epoch 44/200
89/89 [==============================] - 0s - loss: 0.0525 - val_loss: 0.0574
Epoch 45/200
89/89 [==============================] - 0s - loss: 0.0620 - val_loss: 0.0583
Epoch 46/200
89/89 [==============================] - 0s - loss: 0.0509 - val_loss: 0.0559
Epoch 47/200
89/89 [==============================] - 0s - loss: 0.0505 - val_loss: 0.0728
Epoch 48/200
89/89 [==============================] - 0s - loss: 0.0599 - val_loss: 0.0612
Epoch 49/200
89/89 [==============================] - 0s - loss: 0.0775 - val_loss: 0.1010
Epoch 50/200
89/89 [==============================] - 0s - loss: 0.0863 - val_loss: 0.0485
Epoch 51/200
89/89 [==============================] - 0s - loss: 0.0806 - val_loss: 0.0556
Epoch 52/200
89/89 [==============================] - 0s - loss: 0.0577 - val_loss: 0.0625
Epoch 53/200
89/89 [==============================] - 0s - loss: 0.0499 - val_loss: 0.0455
Epoch 54/200
89/89 [==============================] - 0s - loss: 0.0534 - val_loss: 0.0653
Epoch 55/200
89/89 [==============================] - 0s - loss: 0.0506 - val_loss: 0.0586
Epoch 56/200
89/89 [==============================] - 0s - loss: 0.0423 - val_loss: 0.0548
Epoch 57/200
89/89 [==============================] - 0s - loss: 0.0441 - val_loss: 0.0730
Epoch 58/200
89/89 [==============================] - 0s - loss: 0.0453 - val_loss: 0.0541
Epoch 59/200
89/89 [==============================] - 0s - loss: 0.0402 - val_loss: 0.0620
Epoch 60/200
89/89 [==============================] - 0s - loss: 0.0444 - val_loss: 0.0448
Epoch 61/200
89/89 [==============================] - 0s - loss: 0.0338 - val_loss: 0.0612
Epoch 62/200
89/89 [==============================] - 0s - loss: 0.0417 - val_loss: 0.0658
Epoch 63/200
89/89 [==============================] - 0s - loss: 0.0414 - val_loss: 0.0590
Epoch 64/200
89/89 [==============================] - 0s - loss: 0.0458 - val_loss: 0.0471
Epoch 65/200
89/89 [==============================] - 0s - loss: 0.0751 - val_loss: 0.0701
Epoch 66/200
89/89 [==============================] - 0s - loss: 0.0704 - val_loss: 0.0690
Epoch 67/200
89/89 [==============================] - 0s - loss: 0.0836 - val_loss: 0.0526
Epoch 68/200
89/89 [==============================] - 0s - loss: 0.0455 - val_loss: 0.0565
Epoch 69/200
89/89 [==============================] - 0s - loss: 0.0408 - val_loss: 0.0570
Epoch 70/200
89/89 [==============================] - 0s - loss: 0.0411 - val_loss: 0.0575
Epoch 71/200
89/89 [==============================] - 0s - loss: 0.0398 - val_loss: 0.0560
Epoch 72/200
89/89 [==============================] - 0s - loss: 0.0439 - val_loss: 0.0622
Epoch 73/200
89/89 [==============================] - 0s - loss: 0.0394 - val_loss: 0.0735
Epoch 74/200
89/89 [==============================] - 0s - loss: 0.0432 - val_loss: 0.0571
Epoch 75/200
89/89 [==============================] - 0s - loss: 0.0511 - val_loss: 0.0727
Epoch 76/200
89/89 [==============================] - 0s - loss: 0.0463 - val_loss: 0.0583
Epoch 77/200
89/89 [==============================] - 0s - loss: 0.0515 - val_loss: 0.0602
Epoch 78/200
89/89 [==============================] - 0s - loss: 0.0467 - val_loss: 0.0671
Epoch 79/200
89/89 [==============================] - 0s - loss: 0.0394 - val_loss: 0.0526
Epoch 80/200
89/89 [==============================] - 0s - loss: 0.0386 - val_loss: 0.0635
Epoch 81/200
89/89 [==============================] - 0s - loss: 0.0456 - val_loss: 0.0516
Epoch 82/200
89/89 [==============================] - 0s - loss: 0.0434 - val_loss: 0.0826
Epoch 83/200
89/89 [==============================] - 0s - loss: 0.0391 - val_loss: 0.0563
Epoch 84/200
89/89 [==============================] - 0s - loss: 0.0356 - val_loss: 0.0646
Epoch 85/200
89/89 [==============================] - 0s - loss: 0.0342 - val_loss: 0.0628
Epoch 86/200
89/89 [==============================] - 0s - loss: 0.0366 - val_loss: 0.0766
Epoch 87/200
89/89 [==============================] - 0s - loss: 0.0415 - val_loss: 0.0545
Epoch 88/200
89/89 [==============================] - 0s - loss: 0.0519 - val_loss: 0.0665
Epoch 89/200
89/89 [==============================] - 0s - loss: 0.0391 - val_loss: 0.0465
Epoch 90/200
89/89 [==============================] - 0s - loss: 0.0405 - val_loss: 0.0683
Epoch 91/200
89/89 [==============================] - 0s - loss: 0.0327 - val_loss: 0.0564
Epoch 92/200
89/89 [==============================] - 0s - loss: 0.0465 - val_loss: 0.0685
Epoch 93/200
89/89 [==============================] - 0s - loss: 0.0516 - val_loss: 0.0687
Epoch 94/200
89/89 [==============================] - 0s - loss: 0.0647 - val_loss: 0.0554
Epoch 95/200
89/89 [==============================] - 0s - loss: 0.0345 - val_loss: 0.0538
Epoch 96/200
89/89 [==============================] - 0s - loss: 0.0307 - val_loss: 0.0585
Epoch 97/200
89/89 [==============================] - 0s - loss: 0.0298 - val_loss: 0.0589
Epoch 98/200
89/89 [==============================] - 0s - loss: 0.0342 - val_loss: 0.0659
Epoch 99/200
89/89 [==============================] - 0s - loss: 0.0380 - val_loss: 0.0513
Epoch 100/200
89/89 [==============================] - 0s - loss: 0.0398 - val_loss: 0.0671
Epoch 101/200
89/89 [==============================] - 0s - loss: 0.0389 - val_loss: 0.0473
Epoch 102/200
89/89 [==============================] - 0s - loss: 0.0350 - val_loss: 0.0684
Epoch 103/200
89/89 [==============================] - 0s - loss: 0.0305 - val_loss: 0.0644
Epoch 104/200
89/89 [==============================] - 0s - loss: 0.0302 - val_loss: 0.0643
Epoch 105/200
89/89 [==============================] - 0s - loss: 0.0415 - val_loss: 0.0793
Epoch 106/200
89/89 [==============================] - 0s - loss: 0.0381 - val_loss: 0.0618
Epoch 107/200
89/89 [==============================] - 0s - loss: 0.0363 - val_loss: 0.0545
Epoch 108/200
89/89 [==============================] - 0s - loss: 0.0434 - val_loss: 0.0738
Epoch 109/200
89/89 [==============================] - 0s - loss: 0.0489 - val_loss: 0.0577
Epoch 110/200
89/89 [==============================] - 0s - loss: 0.0376 - val_loss: 0.0653
Epoch 111/200
89/89 [==============================] - 0s - loss: 0.0454 - val_loss: 0.0737
Epoch 112/200
89/89 [==============================] - 0s - loss: 0.0378 - val_loss: 0.0640
Epoch 113/200
89/89 [==============================] - 0s - loss: 0.0429 - val_loss: 0.0681
Epoch 114/200
89/89 [==============================] - 0s - loss: 0.0414 - val_loss: 0.0627
Epoch 115/200
89/89 [==============================] - 0s - loss: 0.0400 - val_loss: 0.0672
Epoch 116/200
89/89 [==============================] - 0s - loss: 0.0380 - val_loss: 0.0609
Epoch 117/200
89/89 [==============================] - 0s - loss: 0.0407 - val_loss: 0.0656
Epoch 118/200
89/89 [==============================] - 0s - loss: 0.0352 - val_loss: 0.0584
Epoch 119/200
89/89 [==============================] - 0s - loss: 0.0386 - val_loss: 0.0585
Epoch 120/200
89/89 [==============================] - 0s - loss: 0.0327 - val_loss: 0.0628
Epoch 121/200
89/89 [==============================] - 0s - loss: 0.0375 - val_loss: 0.0664
Epoch 122/200
89/89 [==============================] - 0s - loss: 0.0300 - val_loss: 0.0697
Epoch 123/200
89/89 [==============================] - 0s - loss: 0.0403 - val_loss: 0.0589
Epoch 124/200
89/89 [==============================] - 0s - loss: 0.0339 - val_loss: 0.0847
Epoch 125/200
89/89 [==============================] - 0s - loss: 0.0375 - val_loss: 0.0633
Epoch 126/200
89/89 [==============================] - 0s - loss: 0.0298 - val_loss: 0.0669
Epoch 127/200
89/89 [==============================] - 0s - loss: 0.0354 - val_loss: 0.0682
Epoch 128/200
89/89 [==============================] - 0s - loss: 0.0434 - val_loss: 0.0577
Epoch 129/200
89/89 [==============================] - 0s - loss: 0.0459 - val_loss: 0.0641
Epoch 130/200
89/89 [==============================] - 0s - loss: 0.0306 - val_loss: 0.0583
Epoch 131/200
89/89 [==============================] - 0s - loss: 0.0316 - val_loss: 0.0643
Epoch 132/200
89/89 [==============================] - 0s - loss: 0.0301 - val_loss: 0.0586
Epoch 133/200
89/89 [==============================] - 0s - loss: 0.0275 - val_loss: 0.0758
Epoch 134/200
89/89 [==============================] - 0s - loss: 0.0327 - val_loss: 0.0631
Epoch 135/200
89/89 [==============================] - 0s - loss: 0.0359 - val_loss: 0.0736
Epoch 136/200
89/89 [==============================] - 0s - loss: 0.0380 - val_loss: 0.0662
Epoch 137/200
89/89 [==============================] - 0s - loss: 0.0318 - val_loss: 0.0532
Epoch 138/200
89/89 [==============================] - 0s - loss: 0.0382 - val_loss: 0.0722
Epoch 139/200
89/89 [==============================] - 0s - loss: 0.0428 - val_loss: 0.0666
Epoch 140/200
89/89 [==============================] - 0s - loss: 0.0398 - val_loss: 0.0619
Epoch 141/200
89/89 [==============================] - 0s - loss: 0.0444 - val_loss: 0.0587
Epoch 142/200
89/89 [==============================] - 0s - loss: 0.0346 - val_loss: 0.0599
Epoch 143/200
89/89 [==============================] - 0s - loss: 0.0323 - val_loss: 0.0633
Epoch 144/200
89/89 [==============================] - 0s - loss: 0.0337 - val_loss: 0.0730
Epoch 145/200
89/89 [==============================] - 0s - loss: 0.0275 - val_loss: 0.0696
Epoch 146/200
89/89 [==============================] - 0s - loss: 0.0313 - val_loss: 0.0737
Epoch 147/200
89/89 [==============================] - 0s - loss: 0.0308 - val_loss: 0.0575
Epoch 148/200
89/89 [==============================] - 0s - loss: 0.0262 - val_loss: 0.0785
Epoch 149/200
89/89 [==============================] - 0s - loss: 0.0322 - val_loss: 0.0676
Epoch 150/200
89/89 [==============================] - 0s - loss: 0.0344 - val_loss: 0.0657
Epoch 151/200
89/89 [==============================] - 0s - loss: 0.0261 - val_loss: 0.0652
Epoch 152/200
89/89 [==============================] - 0s - loss: 0.0250 - val_loss: 0.0708
Epoch 153/200
89/89 [==============================] - 0s - loss: 0.0366 - val_loss: 0.0682
Epoch 154/200
89/89 [==============================] - 0s - loss: 0.0393 - val_loss: 0.0669
Epoch 155/200
89/89 [==============================] - 0s - loss: 0.0341 - val_loss: 0.0642
Epoch 156/200
89/89 [==============================] - 0s - loss: 0.0387 - val_loss: 0.0609
Epoch 157/200
89/89 [==============================] - 0s - loss: 0.0381 - val_loss: 0.0682
Epoch 158/200
89/89 [==============================] - 0s - loss: 0.0344 - val_loss: 0.0583
Epoch 159/200
89/89 [==============================] - 0s - loss: 0.0318 - val_loss: 0.0582
Epoch 160/200
89/89 [==============================] - 0s - loss: 0.0325 - val_loss: 0.0598
Epoch 161/200
89/89 [==============================] - 0s - loss: 0.0296 - val_loss: 0.0567
Epoch 162/200
89/89 [==============================] - 0s - loss: 0.0319 - val_loss: 0.0654
Epoch 163/200
89/89 [==============================] - 0s - loss: 0.0280 - val_loss: 0.0631
Epoch 164/200
89/89 [==============================] - 0s - loss: 0.0287 - val_loss: 0.0731
Epoch 165/200
89/89 [==============================] - 0s - loss: 0.0276 - val_loss: 0.0804
Epoch 166/200
89/89 [==============================] - 0s - loss: 0.0295 - val_loss: 0.0614
Epoch 167/200
89/89 [==============================] - 0s - loss: 0.0284 - val_loss: 0.0742
Epoch 168/200
89/89 [==============================] - 0s - loss: 0.0295 - val_loss: 0.0654
Epoch 169/200
89/89 [==============================] - 0s - loss: 0.0341 - val_loss: 0.0660
Epoch 170/200
89/89 [==============================] - 0s - loss: 0.0427 - val_loss: 0.0588
Epoch 171/200
89/89 [==============================] - 0s - loss: 0.0307 - val_loss: 0.0586
Epoch 172/200
89/89 [==============================] - 0s - loss: 0.0362 - val_loss: 0.0754
Epoch 173/200
89/89 [==============================] - 0s - loss: 0.0265 - val_loss: 0.0669
Epoch 174/200
89/89 [==============================] - 0s - loss: 0.0226 - val_loss: 0.0717
Epoch 175/200
89/89 [==============================] - 0s - loss: 0.0233 - val_loss: 0.0661
Epoch 176/200
89/89 [==============================] - 0s - loss: 0.0199 - val_loss: 0.0629
Epoch 177/200
89/89 [==============================] - 0s - loss: 0.0280 - val_loss: 0.0718
Epoch 178/200
89/89 [==============================] - 0s - loss: 0.0329 - val_loss: 0.0620
Epoch 179/200
89/89 [==============================] - 0s - loss: 0.0328 - val_loss: 0.0701
Epoch 180/200
89/89 [==============================] - 0s - loss: 0.0372 - val_loss: 0.0591
Epoch 181/200
89/89 [==============================] - 0s - loss: 0.0410 - val_loss: 0.0509
Epoch 182/200
89/89 [==============================] - 0s - loss: 0.0400 - val_loss: 0.0542
Epoch 183/200
89/89 [==============================] - 0s - loss: 0.0278 - val_loss: 0.0648
Epoch 184/200
89/89 [==============================] - 0s - loss: 0.0277 - val_loss: 0.0657
Epoch 185/200
89/89 [==============================] - 0s - loss: 0.0239 - val_loss: 0.0633
Epoch 186/200
89/89 [==============================] - 0s - loss: 0.0202 - val_loss: 0.0611
Epoch 187/200
89/89 [==============================] - 0s - loss: 0.0241 - val_loss: 0.0533
Epoch 188/200
89/89 [==============================] - 0s - loss: 0.0216 - val_loss: 0.0620
Epoch 189/200
89/89 [==============================] - 0s - loss: 0.0218 - val_loss: 0.0657
Epoch 190/200
89/89 [==============================] - 0s - loss: 0.0261 - val_loss: 0.0540
Epoch 191/200
89/89 [==============================] - 0s - loss: 0.0358 - val_loss: 0.0662
Epoch 192/200
89/89 [==============================] - 0s - loss: 0.0328 - val_loss: 0.0617
Epoch 193/200
89/89 [==============================] - 0s - loss: 0.0326 - val_loss: 0.0519
Epoch 194/200
89/89 [==============================] - 0s - loss: 0.0357 - val_loss: 0.0606
Epoch 195/200
89/89 [==============================] - 0s - loss: 0.0279 - val_loss: 0.0545
Epoch 196/200
89/89 [==============================] - 0s - loss: 0.0310 - val_loss: 0.0548
Epoch 197/200
89/89 [==============================] - 0s - loss: 0.0285 - val_loss: 0.0607
Epoch 198/200
89/89 [==============================] - 0s - loss: 0.0259 - val_loss: 0.0594
Epoch 199/200
89/89 [==============================] - 0s - loss: 0.0287 - val_loss: 0.0676
Epoch 200/200
89/89 [==============================] - 0s - loss: 0.0244 - val_loss: 0.0666
Out[11]:
<keras.callbacks.History at 0x1216bbda0>
Our validation loss is now small and of the same order as the training loss, which suggests we probably have not overfit the data; we just need to remember to account for the normalization when making predictions with the model.
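A minimal sketch of that last step, assuming we keep the fitted scaler around (the normalization cell above discards it), could look like:

# keep the scaler so the standardization can be inverted later (hypothetical variant of the normalization cell)
scaler = StandardScaler()
normalized_spx_data = scaler.fit_transform(spx_data)

# predict normalized closing prices for the held-out windows, then map back to index points;
# Close is the last of the 6 columns, so use its mean and scale
predicted = model.predict(X_test).flatten()
close_mean, close_scale = scaler.mean_[-1], scaler.scale_[-1]
predicted_prices = predicted * close_scale + close_mean
actual_prices = y_test * close_scale + close_mean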
Content source: ktinubu/Predict-Stock-With-LSTM