In [85]:
%matplotlib inline
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.metrics import mean_squared_error
plt.rcParams["figure.figsize"] = (20, 10)

In [86]:
train = pd.read_csv('Google_Stock_Price_Train.csv')
train.info()


<class 'pandas.core.frame.DataFrame'>
RangeIndex: 1258 entries, 0 to 1257
Data columns (total 6 columns):
Date      1258 non-null object
Open      1258 non-null float64
High      1258 non-null float64
Low       1258 non-null float64
Close     1258 non-null object
Volume    1258 non-null object
dtypes: float64(3), object(3)
memory usage: 59.0+ KB

In [87]:
### to make label into a 2d numpy array
label = train.iloc[:,1:2]
label.head()


Out[87]:
Open
0 325.25
1 331.27
2 329.83
3 328.34
4 322.04

In [88]:
### Feature Scaling , Using Normalization for a better result
from sklearn.preprocessing import MinMaxScaler
sc = MinMaxScaler()
train = sc.fit_transform(label)

In [89]:
## for an lstm network t would be the train and validation shall be t+1 
x_train = train[0:1257]

In [90]:
y_train = train[1:1258]

In [91]:
x_train


Out[91]:
array([[ 0.08581368],
       [ 0.09701243],
       [ 0.09433366],
       ..., 
       [ 0.95163331],
       [ 0.95725128],
       [ 0.93796041]])

In [92]:
y_train


Out[92]:
array([[ 0.09701243],
       [ 0.09433366],
       [ 0.09156187],
       ..., 
       [ 0.95725128],
       [ 0.93796041],
       [ 0.93688146]])

The second price in x_train can be seen as the first price at y_Train


In [93]:
## for keras converting to 3d
x_train = np.reshape(x_train,(1257,1,1))

In [94]:
x_train


Out[94]:
array([[[ 0.08581368]],

       [[ 0.09701243]],

       [[ 0.09433366]],

       ..., 
       [[ 0.95163331]],

       [[ 0.95725128]],

       [[ 0.93796041]]])

Converted to 3d array


In [95]:
import tensorflow as tf
from keras.layers import Dense,LSTM
from keras.models import Sequential

In [96]:
reg = Sequential()
## Input layer
reg.add(LSTM(units =4,activation = 'sigmoid',input_shape = (None , 1)))
## Output layer
reg.add(Dense(units = 1,kernel_initializer='uniform'))
#compiling the rnn 
reg.compile(optimizer = 'adam',loss = 'mean_squared_error')

In [97]:
reg.fit(x_train,y_train,batch_size=40,epochs=200)


Epoch 1/200
1257/1257 [==============================] - 1s - loss: 0.2628     
Epoch 2/200
1257/1257 [==============================] - 0s - loss: 0.2118     
Epoch 3/200
1257/1257 [==============================] - 0s - loss: 0.1705     
Epoch 4/200
1257/1257 [==============================] - 0s - loss: 0.1384     
Epoch 5/200
1257/1257 [==============================] - 0s - loss: 0.1152     
Epoch 6/200
1257/1257 [==============================] - 0s - loss: 0.0994     
Epoch 7/200
1257/1257 [==============================] - 0s - loss: 0.0889     
Epoch 8/200
1257/1257 [==============================] - 0s - loss: 0.0827     
Epoch 9/200
1257/1257 [==============================] - 0s - loss: 0.0791     
Epoch 10/200
1257/1257 [==============================] - 0s - loss: 0.0770     
Epoch 11/200
1257/1257 [==============================] - 0s - loss: 0.0758     
Epoch 12/200
1257/1257 [==============================] - 0s - loss: 0.0749     
Epoch 13/200
1257/1257 [==============================] - 0s - loss: 0.0742     
Epoch 14/200
1257/1257 [==============================] - 0s - loss: 0.0735     
Epoch 15/200
1257/1257 [==============================] - 0s - loss: 0.0729     
Epoch 16/200
1257/1257 [==============================] - 0s - loss: 0.0722     
Epoch 17/200
1257/1257 [==============================] - 0s - loss: 0.0715     
Epoch 18/200
1257/1257 [==============================] - 0s - loss: 0.0709     
Epoch 19/200
1257/1257 [==============================] - 0s - loss: 0.0702     
Epoch 20/200
1257/1257 [==============================] - 0s - loss: 0.0695     
Epoch 21/200
1257/1257 [==============================] - 0s - loss: 0.0688     
Epoch 22/200
1257/1257 [==============================] - 0s - loss: 0.0681     
Epoch 23/200
1257/1257 [==============================] - 0s - loss: 0.0673     
Epoch 24/200
1257/1257 [==============================] - 0s - loss: 0.0666     
Epoch 25/200
1257/1257 [==============================] - 0s - loss: 0.0658     
Epoch 26/200
1257/1257 [==============================] - 0s - loss: 0.0650     
Epoch 27/200
1257/1257 [==============================] - 0s - loss: 0.0642     
Epoch 28/200
1257/1257 [==============================] - 0s - loss: 0.0634     
Epoch 29/200
1257/1257 [==============================] - 0s - loss: 0.0625     
Epoch 30/200
1257/1257 [==============================] - 0s - loss: 0.0616     
Epoch 31/200
1257/1257 [==============================] - 0s - loss: 0.0607     
Epoch 32/200
1257/1257 [==============================] - 0s - loss: 0.0598     
Epoch 33/200
1257/1257 [==============================] - 0s - loss: 0.0589     
Epoch 34/200
1257/1257 [==============================] - 0s - loss: 0.0579     
Epoch 35/200
1257/1257 [==============================] - 0s - loss: 0.0569     
Epoch 36/200
1257/1257 [==============================] - 0s - loss: 0.0559     
Epoch 37/200
1257/1257 [==============================] - 0s - loss: 0.0549     
Epoch 38/200
1257/1257 [==============================] - 0s - loss: 0.0538     
Epoch 39/200
1257/1257 [==============================] - 0s - loss: 0.0527     
Epoch 40/200
1257/1257 [==============================] - 0s - loss: 0.0516     
Epoch 41/200
1257/1257 [==============================] - 0s - loss: 0.0505     
Epoch 42/200
1257/1257 [==============================] - 0s - loss: 0.0494     
Epoch 43/200
1257/1257 [==============================] - 0s - loss: 0.0482     
Epoch 44/200
1257/1257 [==============================] - 0s - loss: 0.0471     
Epoch 45/200
1257/1257 [==============================] - 0s - loss: 0.0459     
Epoch 46/200
1257/1257 [==============================] - 0s - loss: 0.0447     
Epoch 47/200
1257/1257 [==============================] - 0s - loss: 0.0435     
Epoch 48/200
1257/1257 [==============================] - 0s - loss: 0.0422     
Epoch 49/200
1257/1257 [==============================] - 0s - loss: 0.0410     
Epoch 50/200
1257/1257 [==============================] - 0s - loss: 0.0397     
Epoch 51/200
1257/1257 [==============================] - 0s - loss: 0.0385     
Epoch 52/200
1257/1257 [==============================] - 0s - loss: 0.0372     
Epoch 53/200
1257/1257 [==============================] - 0s - loss: 0.0359     
Epoch 54/200
1257/1257 [==============================] - 0s - loss: 0.0346     
Epoch 55/200
1257/1257 [==============================] - ETA: 0s - loss: 0.033 - 0s - loss: 0.0333     
Epoch 56/200
1257/1257 [==============================] - 0s - loss: 0.0320     
Epoch 57/200
1257/1257 [==============================] - 0s - loss: 0.0307     
Epoch 58/200
1257/1257 [==============================] - 0s - loss: 0.0294     
Epoch 59/200
1257/1257 [==============================] - 0s - loss: 0.0282     
Epoch 60/200
1257/1257 [==============================] - 0s - loss: 0.0269     
Epoch 61/200
1257/1257 [==============================] - 0s - loss: 0.0257     
Epoch 62/200
1257/1257 [==============================] - 0s - loss: 0.0244     
Epoch 63/200
1257/1257 [==============================] - 0s - loss: 0.0232     
Epoch 64/200
1257/1257 [==============================] - 0s - loss: 0.0220     
Epoch 65/200
1257/1257 [==============================] - 0s - loss: 0.0209     
Epoch 66/200
1257/1257 [==============================] - 0s - loss: 0.0197     
Epoch 67/200
1257/1257 [==============================] - 0s - loss: 0.0186     
Epoch 68/200
1257/1257 [==============================] - 0s - loss: 0.0175     
Epoch 69/200
1257/1257 [==============================] - 0s - loss: 0.0165     
Epoch 70/200
1257/1257 [==============================] - 0s - loss: 0.0154     
Epoch 71/200
1257/1257 [==============================] - 0s - loss: 0.0144     
Epoch 72/200
1257/1257 [==============================] - 0s - loss: 0.0135     
Epoch 73/200
1257/1257 [==============================] - 0s - loss: 0.0126     
Epoch 74/200
1257/1257 [==============================] - 0s - loss: 0.0117     
Epoch 75/200
1257/1257 [==============================] - 0s - loss: 0.0108     
Epoch 76/200
1257/1257 [==============================] - 0s - loss: 0.0100     
Epoch 77/200
1257/1257 [==============================] - 0s - loss: 0.0093     
Epoch 78/200
1257/1257 [==============================] - 0s - loss: 0.0085     
Epoch 79/200
1257/1257 [==============================] - 0s - loss: 0.0079     
Epoch 80/200
1257/1257 [==============================] - 0s - loss: 0.0072     
Epoch 81/200
1257/1257 [==============================] - 0s - loss: 0.0066     
Epoch 82/200
1257/1257 [==============================] - 0s - loss: 0.0060     
Epoch 83/200
1257/1257 [==============================] - 0s - loss: 0.0055     
Epoch 84/200
1257/1257 [==============================] - 0s - loss: 0.0050     
Epoch 85/200
1257/1257 [==============================] - 0s - loss: 0.0046     
Epoch 86/200
1257/1257 [==============================] - 0s - loss: 0.0041     
Epoch 87/200
1257/1257 [==============================] - 0s - loss: 0.0038     
Epoch 88/200
1257/1257 [==============================] - 0s - loss: 0.0034     
Epoch 89/200
1257/1257 [==============================] - 0s - loss: 0.0031     
Epoch 90/200
1257/1257 [==============================] - 0s - loss: 0.0028     
Epoch 91/200
1257/1257 [==============================] - 0s - loss: 0.0025     
Epoch 92/200
1257/1257 [==============================] - 0s - loss: 0.0023     
Epoch 93/200
1257/1257 [==============================] - 0s - loss: 0.0020     
Epoch 94/200
1257/1257 [==============================] - 0s - loss: 0.0018     
Epoch 95/200
1257/1257 [==============================] - 0s - loss: 0.0017     
Epoch 96/200
1257/1257 [==============================] - 0s - loss: 0.0015     
Epoch 97/200
1257/1257 [==============================] - 0s - loss: 0.0014     
Epoch 98/200
1257/1257 [==============================] - 0s - loss: 0.0012     
Epoch 99/200
1257/1257 [==============================] - 0s - loss: 0.0011     
Epoch 100/200
1257/1257 [==============================] - 0s - loss: 0.0010         
Epoch 101/200
1257/1257 [==============================] - 0s - loss: 9.3839e-04     
Epoch 102/200
1257/1257 [==============================] - 0s - loss: 8.6318e-04     
Epoch 103/200
1257/1257 [==============================] - 0s - loss: 7.9895e-04     
Epoch 104/200
1257/1257 [==============================] - 0s - loss: 7.4098e-04     
Epoch 105/200
1257/1257 [==============================] - 0s - loss: 6.9269e-04     
Epoch 106/200
1257/1257 [==============================] - 0s - loss: 6.4856e-04     
Epoch 107/200
1257/1257 [==============================] - 0s - loss: 6.1060e-04     
Epoch 108/200
1257/1257 [==============================] - 0s - loss: 5.7762e-04     
Epoch 109/200
1257/1257 [==============================] - 0s - loss: 5.5041e-04     
Epoch 110/200
1257/1257 [==============================] - 0s - loss: 5.2564e-04     
Epoch 111/200
1257/1257 [==============================] - 0s - loss: 5.0416e-04     
Epoch 112/200
1257/1257 [==============================] - 0s - loss: 4.8564e-04     
Epoch 113/200
1257/1257 [==============================] - 0s - loss: 4.6902e-04     
Epoch 114/200
1257/1257 [==============================] - 0s - loss: 4.5495e-04     
Epoch 115/200
1257/1257 [==============================] - 0s - loss: 4.4224e-04     
Epoch 116/200
1257/1257 [==============================] - 0s - loss: 4.3117e-04     
Epoch 117/200
1257/1257 [==============================] - 0s - loss: 4.2082e-04     
Epoch 118/200
1257/1257 [==============================] - 0s - loss: 4.1187e-04     
Epoch 119/200
1257/1257 [==============================] - 0s - loss: 4.0472e-04     
Epoch 120/200
1257/1257 [==============================] - 0s - loss: 3.9612e-04     
Epoch 121/200
1257/1257 [==============================] - 0s - loss: 3.8916e-04     
Epoch 122/200
1257/1257 [==============================] - 0s - loss: 3.8348e-04     
Epoch 123/200
1257/1257 [==============================] - 0s - loss: 3.7753e-04     
Epoch 124/200
1257/1257 [==============================] - 0s - loss: 3.7108e-04     
Epoch 125/200
1257/1257 [==============================] - 0s - loss: 3.6576e-04     
Epoch 126/200
1257/1257 [==============================] - 0s - loss: 3.6064e-04     
Epoch 127/200
1257/1257 [==============================] - 0s - loss: 3.5622e-04     
Epoch 128/200
1257/1257 [==============================] - 0s - loss: 3.5227e-04     
Epoch 129/200
1257/1257 [==============================] - 0s - loss: 3.4819e-04     
Epoch 130/200
1257/1257 [==============================] - 0s - loss: 3.4376e-04     
Epoch 131/200
1257/1257 [==============================] - 0s - loss: 3.4106e-04     
Epoch 132/200
1257/1257 [==============================] - 0s - loss: 3.3671e-04     
Epoch 133/200
1257/1257 [==============================] - 0s - loss: 3.3397e-04     
Epoch 134/200
1257/1257 [==============================] - 0s - loss: 3.2981e-04     
Epoch 135/200
1257/1257 [==============================] - 0s - loss: 3.2749e-04     
Epoch 136/200
1257/1257 [==============================] - 0s - loss: 3.2417e-04     
Epoch 137/200
1257/1257 [==============================] - 0s - loss: 3.2334e-04     
Epoch 138/200
1257/1257 [==============================] - 0s - loss: 3.1929e-04     
Epoch 139/200
1257/1257 [==============================] - 0s - loss: 3.1814e-04     
Epoch 140/200
1257/1257 [==============================] - 0s - loss: 3.1474e-04     
Epoch 141/200
1257/1257 [==============================] - 0s - loss: 3.1338e-04     
Epoch 142/200
1257/1257 [==============================] - 0s - loss: 3.1103e-04     
Epoch 143/200
1257/1257 [==============================] - 0s - loss: 3.0967e-04     
Epoch 144/200
1257/1257 [==============================] - 0s - loss: 3.0844e-04     
Epoch 145/200
1257/1257 [==============================] - 0s - loss: 3.0671e-04     
Epoch 146/200
1257/1257 [==============================] - 0s - loss: 3.0465e-04     
Epoch 147/200
1257/1257 [==============================] - 0s - loss: 3.0314e-04     
Epoch 148/200
1257/1257 [==============================] - 0s - loss: 3.0014e-04     
Epoch 149/200
1257/1257 [==============================] - 0s - loss: 2.9918e-04     
Epoch 150/200
1257/1257 [==============================] - 0s - loss: 2.9740e-04     
Epoch 151/200
1257/1257 [==============================] - 0s - loss: 2.9632e-04     
Epoch 152/200
1257/1257 [==============================] - 0s - loss: 2.9481e-04     
Epoch 153/200
1257/1257 [==============================] - 0s - loss: 2.9398e-04     
Epoch 154/200
1257/1257 [==============================] - 0s - loss: 2.9241e-04     
Epoch 155/200
1257/1257 [==============================] - 0s - loss: 2.9240e-04     
Epoch 156/200
1257/1257 [==============================] - 0s - loss: 2.9025e-04     
Epoch 157/200
1257/1257 [==============================] - 0s - loss: 2.9002e-04     
Epoch 158/200
1257/1257 [==============================] - 0s - loss: 2.8877e-04     
Epoch 159/200
1257/1257 [==============================] - 0s - loss: 2.8751e-04     
Epoch 160/200
1257/1257 [==============================] - 0s - loss: 2.8666e-04     
Epoch 161/200
1257/1257 [==============================] - 0s - loss: 2.8680e-04     
Epoch 162/200
1257/1257 [==============================] - 0s - loss: 2.8662e-04     
Epoch 163/200
1257/1257 [==============================] - 0s - loss: 2.8431e-04     
Epoch 164/200
1257/1257 [==============================] - 0s - loss: 2.8279e-04     
Epoch 165/200
1257/1257 [==============================] - 0s - loss: 2.8304e-04     
Epoch 166/200
1257/1257 [==============================] - 0s - loss: 2.8257e-04     
Epoch 167/200
1257/1257 [==============================] - 0s - loss: 2.8104e-04     
Epoch 168/200
1257/1257 [==============================] - 0s - loss: 2.8215e-04     
Epoch 169/200
1257/1257 [==============================] - 0s - loss: 2.7851e-04     
Epoch 170/200
1257/1257 [==============================] - 0s - loss: 2.8108e-04     
Epoch 171/200
1257/1257 [==============================] - 0s - loss: 2.7781e-04     
Epoch 172/200
1257/1257 [==============================] - 0s - loss: 2.8021e-04     
Epoch 173/200
1257/1257 [==============================] - 0s - loss: 2.7917e-04     
Epoch 174/200
1257/1257 [==============================] - 0s - loss: 2.7781e-04     
Epoch 175/200
1257/1257 [==============================] - 0s - loss: 2.7694e-04     
Epoch 176/200
1257/1257 [==============================] - 0s - loss: 2.7797e-04     
Epoch 177/200
1257/1257 [==============================] - 0s - loss: 2.7578e-04     
Epoch 178/200
1257/1257 [==============================] - 0s - loss: 2.7529e-04     
Epoch 179/200
1257/1257 [==============================] - 0s - loss: 2.7835e-04     
Epoch 180/200
1257/1257 [==============================] - 0s - loss: 2.7804e-04     
Epoch 181/200
1257/1257 [==============================] - 0s - loss: 2.7505e-04     
Epoch 182/200
1257/1257 [==============================] - 0s - loss: 2.7427e-04     
Epoch 183/200
1257/1257 [==============================] - 0s - loss: 2.7410e-04     
Epoch 184/200
1257/1257 [==============================] - 0s - loss: 2.7339e-04     
Epoch 185/200
1257/1257 [==============================] - 0s - loss: 2.7265e-04     
Epoch 186/200
1257/1257 [==============================] - 0s - loss: 2.7163e-04     
Epoch 187/200
1257/1257 [==============================] - 0s - loss: 2.6993e-04     
Epoch 188/200
1257/1257 [==============================] - 0s - loss: 2.7283e-04     
Epoch 189/200
1257/1257 [==============================] - 0s - loss: 2.6959e-04     
Epoch 190/200
1257/1257 [==============================] - 0s - loss: 2.7097e-04     
Epoch 191/200
1257/1257 [==============================] - 0s - loss: 2.7135e-04     
Epoch 192/200
1257/1257 [==============================] - 0s - loss: 2.7306e-04     
Epoch 193/200
1257/1257 [==============================] - 0s - loss: 2.7375e-04     
Epoch 194/200
1257/1257 [==============================] - 0s - loss: 2.7067e-04     
Epoch 195/200
1257/1257 [==============================] - 0s - loss: 2.7292e-04     
Epoch 196/200
1257/1257 [==============================] - 0s - loss: 2.7074e-04     
Epoch 197/200
1257/1257 [==============================] - 0s - loss: 2.6835e-04     
Epoch 198/200
1257/1257 [==============================] - 0s - loss: 2.6727e-04     
Epoch 199/200
1257/1257 [==============================] - 0s - loss: 2.6687e-04     
Epoch 200/200
1257/1257 [==============================] - 0s - loss: 2.6802e-04     
Out[97]:
<keras.callbacks.History at 0x16acc780>

In [98]:
test = pd.read_csv('Google_Stock_Price_Test.csv')
real_price = test.iloc[:,1:2].values
real_price


Out[98]:
array([[ 778.81],
       [ 788.36],
       [ 786.08],
       [ 795.26],
       [ 806.4 ],
       [ 807.86],
       [ 805.  ],
       [ 807.14],
       [ 807.48],
       [ 807.08],
       [ 805.81],
       [ 805.12],
       [ 806.91],
       [ 807.25],
       [ 822.3 ],
       [ 829.62],
       [ 837.81],
       [ 834.71],
       [ 814.66],
       [ 796.86],
       [ 799.68]])

In [99]:
inp = sc.transform(real_price)
inp = np.reshape(inp , (21,1,1))
pred = reg.predict(inp)
pred = sc.inverse_transform(pred)

In [100]:
pred


Out[100]:
array([[ 776.81573486],
       [ 785.63897705],
       [ 783.53417969],
       [ 792.00146484],
       [ 802.18023682],
       [ 802.90258789],
       [ 800.96551514],
       [ 802.54638672],
       [ 802.71466064],
       [ 802.51672363],
       [ 801.71014404],
       [ 801.0758667 ],
       [ 802.43261719],
       [ 802.60083008],
       [ 810.01708984],
       [ 813.60284424],
       [ 817.59887695],
       [ 816.08825684],
       [ 806.25982666],
       [ 793.47540283],
       [ 796.07183838]], dtype=float32)

In [101]:
plt.plot(real_price,color = 'green',label = 'Actual Price')
plt.plot(pred,color = 'blue',label = 'Predicted')
plt.xlabel('Price')
plt.ylabel('Time')
plt.legend()
plt.show()



In [102]:
mean_squared_error(real_price,pred)


Out[102]:
70.681606404813849

In [103]:
## Getting real train stock price 
realtrain = pd.read_csv('Google_Stock_Price_Train.csv')
realtrain = realtrain.iloc[:,1:2].values

In [104]:
predtrain = reg.predict(x_train)
predtrain = sc.inverse_transform(predtrain)
predtrain


Out[104]:
array([[ 327.0027771 ],
       [ 332.38247681],
       [ 331.09152222],
       ..., 
       [ 787.77935791],
       [ 790.56396484],
       [ 780.99414062]], dtype=float32)

In [105]:
plt.plot(realtrain,color = 'red',label = 'Actual Price')
plt.plot(predtrain,color = 'black',label = 'Predicted')
plt.xlabel('Price')
plt.ylabel('Time')
plt.legend()
plt.show()



In [106]:
mean_squared_error(realtrain[0:-1],predtrain)


Out[106]:
7.1794822360920829