In [1]:
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
%matplotlib inline

In [2]:
eurusd = pd.read_csv("data/EURUSD_daily.csv", index_col='Date')

In [3]:
eurusd.index = pd.to_datetime(eurusd.index)

In [4]:
eurusd.plot(figsize=(16,9))


Out[4]:
<matplotlib.axes._subplots.AxesSubplot at 0x7f4c3dae8c18>

In [5]:
split_date = pd.Timestamp('2016-10-10')  # 10 October 2016 (ISO format)

In [6]:
# .loc slicing is label-inclusive, so the split-date row appears in both sets
train = eurusd.loc[:split_date]
test = eurusd.loc[split_date:]

In [7]:
ax = train.plot()
test.plot(ax=ax)
plt.legend(['train', 'test'])


Out[7]:
<matplotlib.legend.Legend at 0x7f4c3b8de160>

In [8]:
from sklearn.preprocessing import MinMaxScaler

sc = MinMaxScaler()
train_sc = sc.fit_transform(train)  # fit the scaler on the training data only
test_sc = sc.transform(test)        # apply the same scaling to the test data (no lookahead)

In [9]:
# simple one-step setup: predict the next day's scaled value from the current day's
# (these arrays are rebuilt from 20-day windows in In [14] below)
X_train = train_sc[:-1]
y_train = train_sc[1:]

X_test = test_sc[:-1]
y_test = test_sc[1:]

In [10]:
from keras.models import Sequential
from keras.layers import Dense
import keras.backend as K
from keras.callbacks import EarlyStopping


Using TensorFlow backend.

In [11]:
train_sc_df = pd.DataFrame(train_sc, columns=['Scaled'], index=train.index)
test_sc_df = pd.DataFrame(test_sc, columns=['Scaled'], index=test.index)
train_sc_df.head()


Out[11]:
Scaled
Date
2000-01-03 0.259034
2000-01-04 0.262013
2000-01-05 0.264344
2000-01-06 0.262013
2000-01-07 0.259940

In [12]:
# build 20 lag features: shift_s holds the scaled value from s trading days earlier
for s in range(1, 21):
    train_sc_df['shift_{}'.format(s)] = train_sc_df['Scaled'].shift(s)
    test_sc_df['shift_{}'.format(s)] = test_sc_df['Scaled'].shift(s)
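
Each row therefore ends up carrying the current scaled value plus its 20 predecessors, and the first 20 rows of each frame are left with NaNs that get dropped later. A minimal, self-contained illustration of the same shift-based windowing on a toy series (the names toy and lags exist only for this example):

In [ ]:
import pandas as pd

toy = pd.Series([10, 11, 12, 13, 14], name='value')
lags = pd.DataFrame({'value': toy})
for s in range(1, 3):
    lags['shift_{}'.format(s)] = toy.shift(s)
print(lags)
# the last row holds value=14 with shift_1=13 and shift_2=12,
# i.e. the two previous observations become that row's features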

In [13]:
train_sc_df.head(21)


Out[13]:
Scaled shift_1 shift_2 shift_3 shift_4 shift_5 shift_6 shift_7 shift_8 shift_9 ... shift_11 shift_12 shift_13 shift_14 shift_15 shift_16 shift_17 shift_18 shift_19 shift_20
Date
2000-01-03 0.259034 NaN NaN NaN NaN NaN NaN NaN NaN NaN ... NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
2000-01-04 0.262013 0.259034 NaN NaN NaN NaN NaN NaN NaN NaN ... NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
2000-01-05 0.264344 0.262013 0.259034 NaN NaN NaN NaN NaN NaN NaN ... NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
2000-01-06 0.262013 0.264344 0.262013 0.259034 NaN NaN NaN NaN NaN NaN ... NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
2000-01-07 0.259940 0.262013 0.264344 0.262013 0.259034 NaN NaN NaN NaN NaN ... NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
2000-01-10 0.256443 0.259940 0.262013 0.264344 0.262013 0.259034 NaN NaN NaN NaN ... NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
2000-01-11 0.265510 0.256443 0.259940 0.262013 0.264344 0.262013 0.259034 NaN NaN NaN ... NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
2000-01-12 0.263696 0.265510 0.256443 0.259940 0.262013 0.264344 0.262013 0.259034 NaN NaN ... NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
2000-01-13 0.256573 0.263696 0.265510 0.256443 0.259940 0.262013 0.264344 0.262013 0.259034 NaN ... NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
2000-01-14 0.241679 0.256573 0.263696 0.265510 0.256443 0.259940 0.262013 0.264344 0.262013 0.259034 ... NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
2000-01-17 0.238570 0.241679 0.256573 0.263696 0.265510 0.256443 0.259940 0.262013 0.264344 0.262013 ... NaN NaN NaN NaN NaN NaN NaN NaN NaN NaN
2000-01-18 0.239088 0.238570 0.241679 0.256573 0.263696 0.265510 0.256443 0.259940 0.262013 0.264344 ... 0.259034 NaN NaN NaN NaN NaN NaN NaN NaN NaN
2000-01-19 0.239218 0.239088 0.238570 0.241679 0.256573 0.263696 0.265510 0.256443 0.259940 0.262013 ... 0.262013 0.259034 NaN NaN NaN NaN NaN NaN NaN NaN
2000-01-20 0.244916 0.239218 0.239088 0.238570 0.241679 0.256573 0.263696 0.265510 0.256443 0.259940 ... 0.264344 0.262013 0.259034 NaN NaN NaN NaN NaN NaN NaN
2000-01-21 0.235980 0.244916 0.239218 0.239088 0.238570 0.241679 0.256573 0.263696 0.265510 0.256443 ... 0.262013 0.264344 0.262013 0.259034 NaN NaN NaN NaN NaN NaN
2000-01-24 0.228856 0.235980 0.244916 0.239218 0.239088 0.238570 0.241679 0.256573 0.263696 0.265510 ... 0.259940 0.262013 0.264344 0.262013 0.259034 NaN NaN NaN NaN NaN
2000-01-25 0.225489 0.228856 0.235980 0.244916 0.239218 0.239088 0.238570 0.241679 0.256573 0.263696 ... 0.256443 0.259940 0.262013 0.264344 0.262013 0.259034 NaN NaN NaN NaN
2000-01-26 0.224971 0.225489 0.228856 0.235980 0.244916 0.239218 0.239088 0.238570 0.241679 0.256573 ... 0.265510 0.256443 0.259940 0.262013 0.264344 0.262013 0.259034 NaN NaN NaN
2000-01-27 0.207357 0.224971 0.225489 0.228856 0.235980 0.244916 0.239218 0.239088 0.238570 0.241679 ... 0.263696 0.265510 0.256443 0.259940 0.262013 0.264344 0.262013 0.259034 NaN NaN
2000-01-28 0.193887 0.207357 0.224971 0.225489 0.228856 0.235980 0.244916 0.239218 0.239088 0.238570 ... 0.256573 0.263696 0.265510 0.256443 0.259940 0.262013 0.264344 0.262013 0.259034 NaN
2000-01-31 0.183784 0.193887 0.207357 0.224971 0.225489 0.228856 0.235980 0.244916 0.239218 0.239088 ... 0.241679 0.256573 0.263696 0.265510 0.256443 0.259940 0.262013 0.264344 0.262013 0.259034

21 rows × 21 columns


In [14]:
# drop the rows whose lag columns are still NaN (the first 20 of each frame),
# then separate the 20 lag features from the 'Scaled' target
X_train = train_sc_df.dropna().drop('Scaled', axis=1)
y_train = train_sc_df.dropna()[['Scaled']]

X_test = test_sc_df.dropna().drop('Scaled', axis=1)
y_test = test_sc_df.dropna()[['Scaled']]

In [15]:
# convert the DataFrames to NumPy arrays for Keras
X_train = X_train.values
X_test = X_test.values

y_train = y_train.values
y_test = y_test.values

In [16]:
# Keras LSTMs expect input of shape (samples, timesteps, features);
# here the 20 lags are treated as 20 features seen at a single timestep
X_train_t = X_train.reshape(X_train.shape[0], 1, 20)
X_test_t = X_test.reshape(X_test.shape[0], 1, 20)

In [17]:
from keras.layers import LSTM

In [18]:
K.clear_session()
model = Sequential()

model.add(LSTM(6, input_shape=(1, 20)))
model.add(Dense(1))

model.compile(loss='mean_squared_error', optimizer='adam')
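
The first network is an LSTM layer with 6 units reading each sample as one timestep of 20 features, followed by a Dense(1) regression head, trained with mean squared error and the Adam optimizer. A quick way to sanity-check the layer shapes (the exact parameter counts printed will vary with the Keras version):

In [ ]:
model.summary()  # expect LSTM output shape (None, 6) and a final Dense(1) output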

In [19]:
early_stop = EarlyStopping(monitor='loss', patience=1, verbose=1)
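
This callback watches the training loss and stops as soon as it fails to improve for one epoch (patience=1). A common variant, sketched here under the assumption that holding out part of the training data is acceptable, is to monitor val_loss instead, which guards against overfitting rather than just a plateau in training loss (early_stop_val and the 10% split are illustrative choices):

In [ ]:
# hedged alternative: monitor validation loss instead of training loss
early_stop_val = EarlyStopping(monitor='val_loss', patience=3, verbose=1)
# model.fit(X_train_t, y_train, epochs=100, batch_size=1, verbose=1,
#           validation_split=0.1, callbacks=[early_stop_val])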

In [20]:
model.fit(X_train_t, y_train, epochs=100,
          batch_size=1, verbose=1, callbacks=[early_stop])


Epoch 1/100
4356/4356 [==============================] - 15s - loss: 0.0020    
Epoch 2/100
4356/4356 [==============================] - 13s - loss: 4.2285e-04    
Epoch 3/100
4356/4356 [==============================] - 13s - loss: 3.1962e-04    
Epoch 4/100
4356/4356 [==============================] - 13s - loss: 2.8096e-04    
Epoch 5/100
4356/4356 [==============================] - 13s - loss: 2.4730e-04    
Epoch 6/100
4356/4356 [==============================] - 12s - loss: 2.3569e-04    
Epoch 7/100
4356/4356 [==============================] - 12s - loss: 2.1161e-04    
Epoch 8/100
4356/4356 [==============================] - 13s - loss: 2.0204e-04    
Epoch 9/100
4356/4356 [==============================] - 13s - loss: 2.0030e-04    
Epoch 10/100
4356/4356 [==============================] - 13s - loss: 1.9735e-04    
Epoch 11/100
4356/4356 [==============================] - 13s - loss: 1.9254e-04    
Epoch 12/100
4356/4356 [==============================] - 13s - loss: 1.9238e-04    
Epoch 13/100
4356/4356 [==============================] - 13s - loss: 1.8723e-04    
Epoch 14/100
4356/4356 [==============================] - 13s - loss: 1.8391e-04    
Epoch 15/100
4356/4356 [==============================] - 13s - loss: 1.7667e-04    
Epoch 16/100
4356/4356 [==============================] - 13s - loss: 1.8120e-04    
Epoch 17/100
4356/4356 [==============================] - 13s - loss: 1.8487e-04    
Epoch 00016: early stopping
Out[20]:
<keras.callbacks.History at 0x7f4be803dfd0>

In [22]:
fig = plt.figure(figsize=(16,9))

# predict on the test windows and plot against the actual scaled series
y_pred = model.predict(X_test_t)
plt.plot(y_pred)
plt.plot(y_test)
plt.legend(['predicted', 'real'])


Out[22]:
<matplotlib.legend.Legend at 0x7f4bd4509588>

In [23]:
# reshape the same 20-lag windows as sequences: (samples, 20 timesteps, 1 feature)
X_train_t = X_train.reshape(X_train.shape[0], 20, 1)
X_test_t = X_test.reshape(X_test.shape[0], 20, 1)
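
In contrast to In [16], where the 20 lags were presented as 20 features at a single timestep, the LSTM can now unroll over a sequence of 20 one-dimensional observations. A quick shape check (the training logs above report 4356 training samples, so roughly that size is expected):

In [ ]:
print(X_train_t.shape, X_test_t.shape)  # e.g. (4356, 20, 1) and (n_test, 20, 1)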

In [43]:
K.clear_session()
model = Sequential()

# 20 timesteps of 1 feature each; a stacked two-layer variant is sketched after this cell
model.add(LSTM(16, input_shape=(20, 1), activation='tanh'))
model.add(Dense(1))

model.compile(loss='mean_squared_error', optimizer='adam')
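
A stacked variant with a second LSTM layer is a natural extension. A minimal sketch, assuming the same (20, 1) input windows and the same layer size; every LSTM layer except the last needs return_sequences=True so the following layer receives a full 3-D sequence input:

In [ ]:
# illustrative sketch only; 'stacked' is not used elsewhere in this notebook
stacked = Sequential()
stacked.add(LSTM(16, input_shape=(20, 1), activation='tanh', return_sequences=True))
stacked.add(LSTM(16, activation='tanh'))
stacked.add(Dense(1))
stacked.compile(loss='mean_squared_error', optimizer='adam')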

In [44]:
model.fit(X_train_t, y_train, epochs=100, batch_size=16, verbose=1, callbacks=[early_stop])


Epoch 1/100
4356/4356 [==============================] - 4s - loss: 0.0273     
Epoch 2/100
4356/4356 [==============================] - 4s - loss: 0.0013     
Epoch 3/100
4356/4356 [==============================] - 3s - loss: 0.0011     
Epoch 4/100
4356/4356 [==============================] - 4s - loss: 0.0011     
Epoch 5/100
4356/4356 [==============================] - 4s - loss: 0.0011     
Epoch 6/100
4356/4356 [==============================] - 4s - loss: 0.0010     
Epoch 7/100
4356/4356 [==============================] - 4s - loss: 9.5487e-04     
Epoch 8/100
4356/4356 [==============================] - 4s - loss: 9.1920e-04     
Epoch 9/100
4356/4356 [==============================] - 4s - loss: 8.8877e-04     
Epoch 10/100
4356/4356 [==============================] - 4s - loss: 7.5693e-04     
Epoch 11/100
4356/4356 [==============================] - 4s - loss: 6.8808e-04     
Epoch 12/100
4356/4356 [==============================] - 3s - loss: 6.6870e-04     
Epoch 13/100
4356/4356 [==============================] - 4s - loss: 6.6425e-04     
Epoch 14/100
4356/4356 [==============================] - 4s - loss: 6.0916e-04     
Epoch 15/100
4356/4356 [==============================] - 4s - loss: 5.4893e-04     
Epoch 16/100
4356/4356 [==============================] - 4s - loss: 5.5606e-04     
Epoch 17/100
4356/4356 [==============================] - 4s - loss: 5.0449e-04     
Epoch 18/100
4356/4356 [==============================] - 3s - loss: 4.7661e-04     
Epoch 19/100
4356/4356 [==============================] - 4s - loss: 5.1428e-04     
Epoch 20/100
4356/4356 [==============================] - 4s - loss: 5.3258e-04     
Epoch 00019: early stopping
Out[44]:
<keras.callbacks.History at 0x7f4bb6cdbc50>

In [46]:
fig = plt.figure(figsize=(16,8))
y_pred = model.predict(X_test_t, batch_size=16)
plt.plot(y_pred)
plt.plot(y_test)
plt.legend(['predicted', 'real'])


Out[46]:
<matplotlib.legend.Legend at 0x7f4b9b9ffef0>
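
The predictions above are still in the scaler's [0, 1] range. To judge the fit in actual EUR/USD price units, the scaling can be inverted with the MinMaxScaler fitted in In [8] and an error metric computed; a sketch (the choice of mean squared error is just an example):

In [ ]:
from sklearn.metrics import mean_squared_error

# map predictions and targets back to the original price scale
y_pred_price = sc.inverse_transform(y_pred)
y_test_price = sc.inverse_transform(y_test)
print('test MSE (price units):', mean_squared_error(y_test_price, y_pred_price))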

In [47]:
import h5py  # required by Keras to write weights in HDF5 format

In [48]:
model.save_weights("model.h5")  # weights only, in HDF5 format

In [49]:
json_model = model.to_json()  # architecture only, as a JSON string

In [50]:
with open("model.json", "w") as json_file:
    json_file.write(json_model)
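
With the architecture in model.json and the weights in model.h5, the model can later be rebuilt without retraining. A minimal sketch of the reload step (the reloaded model has to be compiled again before it can be evaluated):

In [ ]:
from keras.models import model_from_json

with open("model.json", "r") as json_file:
    loaded_model = model_from_json(json_file.read())
loaded_model.load_weights("model.h5")
loaded_model.compile(loss='mean_squared_error', optimizer='adam')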
