In [1]:
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
%matplotlib inline
In [2]:
# Load the daily EUR/USD series, using the Date column as the index
eurusd = pd.read_csv("data/EURUSD_daily.csv", index_col='Date')
In [3]:
# Parse the index as datetimes so the series can be sliced by date
eurusd.index = pd.to_datetime(eurusd.index)
In [4]:
eurusd.plot(figsize=(16,9))
Out[4]:
In [5]:
# Chronological train/test split point
split_date = pd.Timestamp('2016-10-10')
In [6]:
# Both .loc slices are label-inclusive, so the split date itself can appear in both sets
train = eurusd.loc[:split_date]
test = eurusd.loc[split_date:]
In [7]:
ax = train.plot()
test.plot(ax=ax)
plt.legend(['train', 'test'])
Out[7]:
In [8]:
from sklearn.preprocessing import MinMaxScaler
sc = MinMaxScaler()
# Fit the scaler on the training window only, then apply the same scaling to the test window
train_sc = sc.fit_transform(train)
test_sc = sc.transform(test)
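Because the scaler only ever sees the training window, no information from after the split leaks into the preprocessing. As a quick sanity check (a sketch, not part of the original run), inverse_transform should give back the raw prices:
In [ ]:
# Sanity check (sketch): undo the scaling and compare with the original prices
recovered = sc.inverse_transform(train_sc)
print(np.allclose(recovered, train.values))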
In [9]:
# First attempt: predict each day from the previous day only.
# These arrays are superseded by the 20-lag window built below.
X_train = train_sc[:-1]
y_train = train_sc[1:]
X_test = test_sc[:-1]
y_test = test_sc[1:]
In [10]:
from keras.models import Sequential
from keras.layers import Dense
import keras.backend as K
from keras.callbacks import EarlyStopping
In [11]:
train_sc_df = pd.DataFrame(train_sc, columns=['Scaled'], index=train.index)
test_sc_df = pd.DataFrame(test_sc, columns=['Scaled'], index=test.index)
train_sc_df.head()
Out[11]:
In [12]:
# Build 20 lag features: shift_k holds the scaled value from k days earlier
for s in range(1, 21):
    train_sc_df['shift_{}'.format(s)] = train_sc_df['Scaled'].shift(s)
    test_sc_df['shift_{}'.format(s)] = test_sc_df['Scaled'].shift(s)
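Each shift_k column holds the value from k days earlier, so after dropping the first 20 rows every remaining row carries the previous 20 observations as features. The same window can be rebuilt with plain NumPy as a cross-check (a sketch, not part of the original notebook):
In [ ]:
# Cross-check (sketch): rebuild the lag window with NumPy.
# Row i holds the 20 previous scaled values, most recent first,
# matching the shift_1 ... shift_20 column order above.
window = 20
series = train_sc.ravel()
X_check = np.array([series[i - window:i][::-1] for i in range(window, len(series))])
print(np.allclose(X_check, train_sc_df.dropna().drop('Scaled', axis=1).values))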
In [13]:
train_sc_df.head(21)  # the first 20 rows contain NaNs introduced by the shifts
Out[13]:
In [14]:
# Drop the NaN rows; features are the 20 lag columns, target is the current scaled value
X_train = train_sc_df.dropna().drop('Scaled', axis=1)
y_train = train_sc_df.dropna()[['Scaled']]
X_test = test_sc_df.dropna().drop('Scaled', axis=1)
y_test = test_sc_df.dropna()[['Scaled']]
In [15]:
# Convert the DataFrames to NumPy arrays for Keras
X_train = X_train.values
X_test = X_test.values
y_train = y_train.values
y_test = y_test.values
In [16]:
# Keras LSTMs expect input of shape (samples, timesteps, features): here one timestep with 20 features
X_train_t = X_train.reshape(X_train.shape[0], 1, 20)
X_test_t = X_test.reshape(X_test.shape[0], 1, 20)
In [17]:
from keras.layers import LSTM
In [18]:
K.clear_session()
model = Sequential()
# A small LSTM (6 units) reading the 20 lags as a single timestep, followed by a linear output
model.add(LSTM(6, input_shape=(1, 20)))
model.add(Dense(1))
model.compile(loss='mean_squared_error', optimizer='adam')
In [19]:
# Stop training once the training loss fails to improve for one epoch
early_stop = EarlyStopping(monitor='loss', patience=1, verbose=1)
In [20]:
model.fit(X_train_t, y_train, epochs=100,
          batch_size=1, verbose=1, callbacks=[early_stop])
Out[20]:
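EarlyStopping above watches the training loss, so it stops as soon as the fit on the training data stalls. A common variant (a sketch, not what was run here) is to hold back part of the training set and stop on the validation loss instead:
In [ ]:
# Variant (sketch): stop on held-out validation loss rather than training loss
early_stop_val = EarlyStopping(monitor='val_loss', patience=3, verbose=1)
model.fit(X_train_t, y_train, epochs=100, batch_size=1, verbose=1,
          validation_split=0.1, callbacks=[early_stop_val])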
In [22]:
fig = plt.figure(figsize=(16,9))
# One-timestep model: predicted vs. actual scaled prices on the test set
y_pred = model.predict(X_test_t)
plt.plot(y_pred)
plt.plot(y_test)
plt.legend(['predicted', 'real'])
Out[22]:
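The curves above are still on the 0-1 scale of the MinMaxScaler. To read them in price terms they can be mapped back with inverse_transform (a sketch, not part of the original notebook):
In [ ]:
# Sketch: map scaled predictions and targets back to EUR/USD prices
y_pred_price = sc.inverse_transform(y_pred)
y_test_price = sc.inverse_transform(y_test)
plt.figure(figsize=(16, 9))
plt.plot(y_pred_price)
plt.plot(y_test_price)
plt.legend(['predicted', 'real'])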
In [23]:
# Reframe the input as 20 timesteps of one feature each, so the LSTM steps
# through the lag sequence (shift_1 first, i.e. most recent lag first)
X_train_t = X_train.reshape(X_train.shape[0], 20, 1)
X_test_t = X_test.reshape(X_test.shape[0], 20, 1)
In [43]:
K.clear_session()
model = Sequential()
# A single 16-unit LSTM over the 20-step sequence; stacking a second LSTM
# would require return_sequences=True on this layer
model.add(LSTM(16, input_shape=(20, 1), activation='tanh'))
model.add(Dense(1))
model.compile(loss='mean_squared_error', optimizer='adam')
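Before training, the layer shapes and parameter counts can be confirmed with Keras's built-in summary:
In [ ]:
# Inspect layer output shapes and parameter counts
model.summary()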
In [44]:
model.fit(X_train_t, y_train, epochs=100, batch_size=16, verbose=1,
          callbacks=[early_stop])
Out[44]:
In [46]:
fig = plt.figure(figsize=(16,8))
# Sequence model: predicted vs. actual scaled prices on the test set
y_pred = model.predict(X_test_t, batch_size=16)
plt.plot(y_pred)
plt.plot(y_test)
plt.legend(['predicted', 'real'])
Out[46]:
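Eyeballing the overlay only goes so far; a single error number makes the two input framings comparable (a sketch using scikit-learn's mean_squared_error, not part of the original notebook):
In [ ]:
# Sketch: score the forecast numerically on the scaled test data
from sklearn.metrics import mean_squared_error
print('Test MSE (scaled):', mean_squared_error(y_test, y_pred))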
In [47]:
# h5py backs the HDF5 file used by save_weights below
import h5py
In [48]:
# Save the trained weights to HDF5
model.save_weights("model.h5")
In [49]:
# Serialize the model architecture to JSON
json_model = model.to_json()
In [50]:
with open("model.json", "w") as json_file:
json_file.write(json_model)
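The architecture (JSON) and the weights (HDF5) are stored separately, so restoring the model needs both files. The standard Keras round trip looks like this (a sketch, not part of the original run):
In [ ]:
# Sketch: rebuild the model from the saved architecture and weights
from keras.models import model_from_json
with open("model.json") as json_file:
    loaded_model = model_from_json(json_file.read())
loaded_model.load_weights("model.h5")
loaded_model.compile(loss='mean_squared_error', optimizer='adam')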