In [14]:
from math import sqrt
from numpy import concatenate
from matplotlib import pyplot
import pandas as pd
from sklearn.preprocessing import MinMaxScaler, LabelEncoder
from sklearn.metrics import mean_squared_error
from keras.models import Sequential
from keras.layers import Dense, LSTM
from datetime import datetime

# download the dataset here: https://archive.ics.uci.edu/ml/datasets/Beijing+PM2.5+Data

In [3]:
raw_data = pd.read_csv("beijing_pm25.csv")
raw_data.head()


Out[3]:
No year month day hour pm2.5 DEWP TEMP PRES cbwd Iws Is Ir
0 1 2010 1 1 0 NaN -21 -11.0 1021.0 NW 1.79 0 0
1 2 2010 1 1 1 NaN -21 -12.0 1020.0 NW 4.92 0 0
2 3 2010 1 1 2 NaN -21 -11.0 1019.0 NW 6.71 0 0
3 4 2010 1 1 3 NaN -21 -14.0 1019.0 NW 9.84 0 0
4 5 2010 1 1 4 NaN -20 -12.0 1018.0 NW 12.97 0 0

In [9]:
print(raw_data.isnull().sum())
print(raw_data.shape)
print(raw_data['pm2.5'].isnull().sum() / raw_data.shape[0])


No          0
year        0
month       0
day         0
hour        0
pm2.5    2067
DEWP        0
TEMP        0
PRES        0
cbwd        0
Iws         0
Is          0
Ir          0
dtype: int64
(43824, 13)
0.0471659364732
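
About 4.7% of the pm2.5 readings are missing. Before deciding how to impute them, it can help to check where the gaps fall in time; a quick optional check on the same raw_data (the variable name is just for this sketch):

# count missing pm2.5 readings per year to see whether the gaps cluster in time
missing_by_year = raw_data[raw_data['pm2.5'].isnull()].groupby('year').size()
print(missing_by_year)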

In [10]:
print(min(raw_data['pm2.5'].dropna()), max(raw_data['pm2.5'].dropna()), raw_data['pm2.5'].dropna().median())


0.0 994.0 72.0
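
The median (72) sits far below the maximum (994), which hints at a right-skewed distribution; that skew is one argument for imputing with the median rather than the mean. A quick optional confirmation sketch:

# a mean well above the median (and a large positive skew) confirms the right skew
pm25 = raw_data['pm2.5'].dropna()
print(pm25.mean(), pm25.median(), pm25.skew())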

In [16]:
# Data Preprocessing

def str2time(time_str):
    return datetime.strptime(time_str, '%Y %m %d %H')

## combine year, month, day, hour into a single datetime index
raw_data = pd.read_csv("beijing_pm25.csv", parse_dates=[['year', 'month', 'day', 'hour']], index_col=0, date_parser=str2time)
raw_data.drop('No', axis=1, inplace=True)  # drop the original row-number column 'No'

## rename columns
raw_data.columns = ['pollution', 'dew', 'temp', 'press', 'wnd_dir', 'wnd_spd', 'snow', 'rain']
raw_data.index.name = 'date'

## impute missing pollution values with the median
pollution_median = raw_data['pollution'].dropna().median()
raw_data['pollution'].fillna(pollution_median, inplace=True)

raw_data.head()


Out[16]:
pollution dew temp press wnd_dir wnd_spd snow rain
date
2010-01-01 00:00:00 72.0 -21 -11.0 1021.0 NW 1.79 0 0
2010-01-01 01:00:00 72.0 -21 -12.0 1020.0 NW 4.92 0 0
2010-01-01 02:00:00 72.0 -21 -11.0 1019.0 NW 6.71 0 0
2010-01-01 03:00:00 72.0 -21 -14.0 1019.0 NW 9.84 0 0
2010-01-01 04:00:00 72.0 -20 -12.0 1018.0 NW 12.97 0 0
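
Median imputation fills every gap with the same constant, which flattens the series wherever readings are missing. For hourly data, time-based interpolation is a common alternative; the sketch below is only an option, not what the pipeline above uses, and it works on a fresh copy of the raw column since raw_data is already filled:

# alternative to the median fill: interpolate along the datetime index,
# then forward/back fill any gaps left at the very start or end of the series
raw_pm25 = pd.read_csv("beijing_pm25.csv")['pm2.5']
raw_pm25.index = raw_data.index                      # reuse the datetime index built above
alt = raw_pm25.interpolate(method='time').ffill().bfill()
print(alt.isnull().sum())  # 0 after interpolation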

In [17]:
# label encode wnd_dir (wind direction) as integers

encoder = LabelEncoder()
raw_data['wnd_dir'] = encoder.fit_transform(raw_data['wnd_dir'])
print(raw_data['wnd_dir'].unique())
raw_data.head()


[1 3 0 2]
Out[17]:
pollution dew temp press wnd_dir wnd_spd snow rain
date
2010-01-01 00:00:00 72.0 -21 -11.0 1021.0 1 1.79 0 0
2010-01-01 01:00:00 72.0 -21 -12.0 1020.0 1 4.92 0 0
2010-01-01 02:00:00 72.0 -21 -11.0 1019.0 1 6.71 0 0
2010-01-01 03:00:00 72.0 -21 -14.0 1019.0 1 9.84 0 0
2010-01-01 04:00:00 72.0 -20 -12.0 1018.0 1 12.97 0 0
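
LabelEncoder maps the four wind directions to arbitrary integers, which imposes an ordering the categories do not actually have. For a neural network, one-hot encoding is often the safer choice; a minimal sketch of that alternative, kept separate so it does not change the pipeline above (the variable names are just illustrative):

# alternative: one-hot encode wind direction instead of label encoding it
wind_dummies = pd.get_dummies(raw_data['wnd_dir'], prefix='wnd_dir')
data_onehot = pd.concat([raw_data.drop('wnd_dir', axis=1), wind_dummies], axis=1)
print(data_onehot.shape)  # 4 dummy columns replace the single integer column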

In [18]:
pyplot.figure()
col_nums = raw_data.shape[1]

for i in range(col_nums):
    pyplot.subplot(col_nums, 1, i+1)
    pyplot.plot(raw_data.values[:, i])
    pyplot.title(raw_data.columns[i], y=1, loc='right')  # place the column name at the top right of each subplot
pyplot.show()
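
With roughly 44k hourly points per panel, the raw traces are dense. Resampling to daily means gives a cleaner overview; an optional sketch using the datetime index built earlier:

# optional: plot daily means for a less noisy overview of the pollution series
daily = raw_data.resample('D').mean()
daily['pollution'].plot(figsize=(12, 3), title='daily mean PM2.5')
pyplot.show()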



In [19]:
# make sure all values are floats
values = raw_data.values
values = values.astype('float32')
values


Out[19]:
array([[  72.        ,  -21.        ,  -11.        , ...,    1.78999996,
           0.        ,    0.        ],
       [  72.        ,  -21.        ,  -12.        , ...,    4.92000008,
           0.        ,    0.        ],
       [  72.        ,  -21.        ,  -11.        , ...,    6.71000004,
           0.        ,    0.        ],
       ..., 
       [  10.        ,  -22.        ,   -3.        , ...,  242.69999695,
           0.        ,    0.        ],
       [   8.        ,  -22.        ,   -4.        , ...,  246.72000122,
           0.        ,    0.        ],
       [  12.        ,  -21.        ,   -3.        , ...,  249.8500061 ,
           0.        ,    0.        ]], dtype=float32)

In [20]:
# normalize the data into [0,1] range
scaler = MinMaxScaler(feature_range=(0, 1))
scaled = scaler.fit_transform(values)
scaled


Out[20]:
array([[ 0.0724346 ,  0.27941179,  0.13114753, ...,  0.00229001,
         0.        ,  0.        ],
       [ 0.0724346 ,  0.27941179,  0.1147541 , ...,  0.00763907,
         0.        ,  0.        ],
       [ 0.0724346 ,  0.27941179,  0.13114753, ...,  0.01069811,
         0.        ,  0.        ],
       ..., 
       [ 0.01006036,  0.2647059 ,  0.26229507, ...,  0.41399646,
         0.        ,  0.        ],
       [ 0.00804829,  0.2647059 ,  0.24590163, ...,  0.42086649,
         0.        ,  0.        ],
       [ 0.01207243,  0.27941179,  0.26229507, ...,  0.42621556,
         0.        ,  0.        ]], dtype=float32)
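
One caveat: the scaler is fit on the full four years before the train/test split, so the test period's minima and maxima leak into the scaling. The effect on min-max scaling is usually small, but a leak-free variant fits on the training rows only; a sketch assuming the same 3-year cutoff used below (names are just for this sketch):

# leak-free variant: fit the scaler on the training period only,
# then apply that same transform to the test period
n_train_hours = 365 * 24 * 3
scaler_tr = MinMaxScaler(feature_range=(0, 1))
scaled_train = scaler_tr.fit_transform(values[:n_train_hours])
scaled_test = scaler_tr.transform(values[n_train_hours:])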

In [21]:
# Convert the series to a supervised learning format
## Here, var_i(t) is one time step forward from var_i(t-1)

def series_to_supervised(data, n_in=1, n_out=1, dropnan=True):
    n_vars = 1 if type(data) is list else data.shape[1]  # number of columns in the original data
    df = pd.DataFrame(data)
    cols, names = [], []
    # input sequence (t-n, ... t-1)
    for i in range(n_in, 0, -1):
        cols.append(df.shift(i))
        names += [('var%d(t-%d)' % (j+1, i)) for j in range(n_vars)]
    # forecast sequence (t, t+1, ... t+n)
    for i in range(0, n_out):
        cols.append(df.shift(-i))
        if i == 0:
            names += [('var%d(t)' % (j+1)) for j in range(n_vars)]
        else:
            names += [('var%d(t+%d)' % (j+1, i)) for j in range(n_vars)]
    # put it all together
    agg = pd.concat(cols, axis=1)
    agg.columns = names
    # drop rows with NaN values
    if dropnan:
        agg.dropna(inplace=True)
    return agg

forward_steps = 3
reframed = series_to_supervised(scaled, forward_steps, 1)  # unlike the one-step-ahead setup, use the previous 3 hours as input
reframed


Out[21]:
var1(t-3) var2(t-3) var3(t-3) var4(t-3) var5(t-3) var6(t-3) var7(t-3) var8(t-3) var1(t-2) var2(t-2) ... var7(t-1) var8(t-1) var1(t) var2(t) var3(t) var4(t) var5(t) var6(t) var7(t) var8(t)
3 0.072435 0.279412 0.131148 0.545454 0.333333 0.002290 0.000000 0.0 0.072435 0.279412 ... 0.000000 0.0 0.072435 0.279412 0.081967 0.509090 0.333333 0.016047 0.000000 0.0
4 0.072435 0.279412 0.114754 0.527273 0.333333 0.007639 0.000000 0.0 0.072435 0.279412 ... 0.000000 0.0 0.072435 0.294118 0.114754 0.490910 0.333333 0.021396 0.000000 0.0
5 0.072435 0.279412 0.131148 0.509090 0.333333 0.010698 0.000000 0.0 0.072435 0.279412 ... 0.000000 0.0 0.072435 0.308824 0.147541 0.472727 0.333333 0.026745 0.000000 0.0
6 0.072435 0.279412 0.081967 0.509090 0.333333 0.016047 0.000000 0.0 0.072435 0.294118 ... 0.000000 0.0 0.072435 0.308824 0.163934 0.472727 0.333333 0.032094 0.000000 0.0
7 0.072435 0.294118 0.114754 0.490910 0.333333 0.021396 0.000000 0.0 0.072435 0.308824 ... 0.000000 0.0 0.072435 0.308824 0.163934 0.472727 0.333333 0.035153 0.000000 0.0
8 0.072435 0.308824 0.147541 0.472727 0.333333 0.026745 0.000000 0.0 0.072435 0.308824 ... 0.000000 0.0 0.072435 0.308824 0.163934 0.472727 0.333333 0.040502 0.000000 0.0
9 0.072435 0.308824 0.163934 0.472727 0.333333 0.032094 0.000000 0.0 0.072435 0.308824 ... 0.000000 0.0 0.072435 0.294118 0.180328 0.472727 0.333333 0.045851 0.000000 0.0
10 0.072435 0.308824 0.163934 0.472727 0.333333 0.035153 0.000000 0.0 0.072435 0.308824 ... 0.000000 0.0 0.072435 0.308824 0.196721 0.472727 0.333333 0.052722 0.000000 0.0
11 0.072435 0.308824 0.163934 0.472727 0.333333 0.040502 0.000000 0.0 0.072435 0.294118 ... 0.000000 0.0 0.072435 0.323529 0.229508 0.472727 0.333333 0.058071 0.000000 0.0
12 0.072435 0.294118 0.180328 0.472727 0.333333 0.045851 0.000000 0.0 0.072435 0.308824 ... 0.000000 0.0 0.072435 0.308824 0.229508 0.436363 0.333333 0.063420 0.000000 0.0
13 0.072435 0.308824 0.196721 0.472727 0.333333 0.052722 0.000000 0.0 0.072435 0.323529 ... 0.000000 0.0 0.072435 0.323529 0.262295 0.436363 0.333333 0.068769 0.000000 0.0
14 0.072435 0.323529 0.229508 0.472727 0.333333 0.058071 0.000000 0.0 0.072435 0.308824 ... 0.000000 0.0 0.072435 0.323529 0.278689 0.418182 0.333333 0.074118 0.000000 0.0
15 0.072435 0.308824 0.229508 0.436363 0.333333 0.063420 0.000000 0.0 0.072435 0.323529 ... 0.000000 0.0 0.072435 0.323529 0.295082 0.418182 1.000000 0.000752 0.000000 0.0
16 0.072435 0.323529 0.262295 0.436363 0.333333 0.068769 0.000000 0.0 0.072435 0.323529 ... 0.000000 0.0 0.072435 0.308824 0.278689 0.436363 0.333333 0.002290 0.000000 0.0
17 0.072435 0.323529 0.278689 0.418182 0.333333 0.074118 0.000000 0.0 0.072435 0.323529 ... 0.000000 0.0 0.072435 0.323529 0.262295 0.436363 0.333333 0.003811 0.000000 0.0
18 0.072435 0.323529 0.295082 0.418182 1.000000 0.000752 0.000000 0.0 0.072435 0.308824 ... 0.000000 0.0 0.072435 0.323529 0.229508 0.454546 0.000000 0.002290 0.000000 0.0
19 0.072435 0.308824 0.278689 0.436363 0.333333 0.002290 0.000000 0.0 0.072435 0.323529 ... 0.000000 0.0 0.072435 0.338235 0.245902 0.472727 0.333333 0.002290 0.000000 0.0
20 0.072435 0.323529 0.262295 0.436363 0.333333 0.003811 0.000000 0.0 0.072435 0.323529 ... 0.000000 0.0 0.072435 0.338235 0.229508 0.472727 1.000000 0.000752 0.000000 0.0
21 0.072435 0.323529 0.229508 0.454546 0.000000 0.002290 0.000000 0.0 0.072435 0.338235 ... 0.000000 0.0 0.072435 0.338235 0.229508 0.490910 0.333333 0.002290 0.000000 0.0
22 0.072435 0.338235 0.245902 0.472727 0.333333 0.002290 0.000000 0.0 0.072435 0.338235 ... 0.000000 0.0 0.072435 0.338235 0.229508 0.490910 0.333333 0.003811 0.000000 0.0
23 0.072435 0.338235 0.229508 0.472727 1.000000 0.000752 0.000000 0.0 0.072435 0.338235 ... 0.000000 0.0 0.072435 0.338235 0.229508 0.527273 1.000000 0.000752 0.000000 0.0
24 0.072435 0.338235 0.229508 0.490910 0.333333 0.002290 0.000000 0.0 0.072435 0.338235 ... 0.000000 0.0 0.129779 0.352941 0.245902 0.527273 0.666667 0.002290 0.000000 0.0
25 0.072435 0.338235 0.229508 0.490910 0.333333 0.003811 0.000000 0.0 0.072435 0.338235 ... 0.000000 0.0 0.148893 0.367647 0.245902 0.527273 0.666667 0.003811 0.000000 0.0
26 0.072435 0.338235 0.229508 0.527273 1.000000 0.000752 0.000000 0.0 0.129779 0.352941 ... 0.000000 0.0 0.159960 0.426471 0.229508 0.545454 0.666667 0.005332 0.000000 0.0
27 0.129779 0.352941 0.245902 0.527273 0.666667 0.002290 0.000000 0.0 0.148893 0.367647 ... 0.000000 0.0 0.182093 0.485294 0.229508 0.563637 0.666667 0.008391 0.037037 0.0
28 0.148893 0.367647 0.245902 0.527273 0.666667 0.003811 0.000000 0.0 0.159960 0.426471 ... 0.037037 0.0 0.138833 0.485294 0.229508 0.563637 0.666667 0.009912 0.074074 0.0
29 0.159960 0.426471 0.229508 0.545454 0.666667 0.005332 0.000000 0.0 0.182093 0.485294 ... 0.074074 0.0 0.109658 0.485294 0.213115 0.563637 0.666667 0.011433 0.111111 0.0
30 0.182093 0.485294 0.229508 0.563637 0.666667 0.008391 0.037037 0.0 0.138833 0.485294 ... 0.111111 0.0 0.105634 0.485294 0.213115 0.581818 0.666667 0.014492 0.148148 0.0
31 0.138833 0.485294 0.229508 0.563637 0.666667 0.009912 0.074074 0.0 0.109658 0.485294 ... 0.148148 0.0 0.124748 0.485294 0.229508 0.600000 0.666667 0.017551 0.000000 0.0
32 0.109658 0.485294 0.213115 0.563637 0.666667 0.011433 0.111111 0.0 0.105634 0.485294 ... 0.000000 0.0 0.120724 0.470588 0.213115 0.600000 0.666667 0.020610 0.000000 0.0
... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
43794 0.055332 0.382353 0.409836 0.472727 0.666667 0.019106 0.000000 0.0 0.060362 0.382353 ... 0.000000 0.0 0.079477 0.397059 0.344262 0.527273 0.000000 0.005349 0.000000 0.0
43795 0.060362 0.382353 0.409836 0.490910 1.000000 0.000752 0.000000 0.0 0.063380 0.382353 ... 0.000000 0.0 0.035211 0.470588 0.409836 0.545454 0.333333 0.009160 0.000000 0.0
43796 0.063380 0.382353 0.393443 0.509090 0.000000 0.002290 0.000000 0.0 0.079477 0.397059 ... 0.000000 0.0 0.026157 0.426471 0.393443 0.563637 0.333333 0.021379 0.000000 0.0
43797 0.079477 0.397059 0.344262 0.527273 0.000000 0.005349 0.000000 0.0 0.035211 0.470588 ... 0.000000 0.0 0.020121 0.411765 0.377049 0.581818 0.333333 0.036657 0.000000 0.0
43798 0.035211 0.470588 0.409836 0.545454 0.333333 0.009160 0.000000 0.0 0.026157 0.426471 ... 0.000000 0.0 0.008048 0.279412 0.344262 0.618183 0.333333 0.053456 0.000000 0.0
43799 0.026157 0.426471 0.393443 0.563637 0.333333 0.021379 0.000000 0.0 0.020121 0.411765 ... 0.000000 0.0 0.016097 0.264706 0.311475 0.636364 0.333333 0.065675 0.000000 0.0
43800 0.020121 0.411765 0.377049 0.581818 0.333333 0.036657 0.000000 0.0 0.008048 0.279412 ... 0.000000 0.0 0.010060 0.308824 0.295082 0.654547 0.333333 0.087824 0.000000 0.0
43801 0.008048 0.279412 0.344262 0.618183 0.333333 0.053456 0.000000 0.0 0.016097 0.264706 ... 0.000000 0.0 0.011066 0.323529 0.295082 0.672728 0.333333 0.104623 0.000000 0.0
43802 0.016097 0.264706 0.311475 0.636364 0.333333 0.065675 0.000000 0.0 0.010060 0.308824 ... 0.000000 0.0 0.020121 0.338235 0.295082 0.672728 0.333333 0.119901 0.000000 0.0
43803 0.010060 0.308824 0.295082 0.654547 0.333333 0.087824 0.000000 0.0 0.011066 0.323529 ... 0.000000 0.0 0.009054 0.338235 0.295082 0.690910 0.333333 0.139007 0.000000 0.0
43804 0.011066 0.323529 0.295082 0.672728 0.333333 0.104623 0.000000 0.0 0.020121 0.338235 ... 0.000000 0.0 0.008048 0.308824 0.278689 0.709091 0.333333 0.161155 0.000000 0.0
43805 0.020121 0.338235 0.295082 0.672728 0.333333 0.119901 0.000000 0.0 0.009054 0.338235 ... 0.000000 0.0 0.009054 0.279412 0.262295 0.709091 0.333333 0.187132 0.000000 0.0
43806 0.009054 0.338235 0.295082 0.690910 0.333333 0.139007 0.000000 0.0 0.008048 0.308824 ... 0.000000 0.0 0.008048 0.250000 0.245902 0.745455 0.333333 0.221516 0.000000 0.0
43807 0.008048 0.308824 0.278689 0.709091 0.333333 0.161155 0.000000 0.0 0.009054 0.279412 ... 0.000000 0.0 0.008048 0.264706 0.229508 0.781818 0.333333 0.243664 0.000000 0.0
43808 0.009054 0.279412 0.262295 0.709091 0.333333 0.187132 0.000000 0.0 0.008048 0.250000 ... 0.000000 0.0 0.008048 0.264706 0.229508 0.781818 0.333333 0.255883 0.000000 0.0
43809 0.008048 0.250000 0.245902 0.745455 0.333333 0.221516 0.000000 0.0 0.008048 0.264706 ... 0.000000 0.0 0.008048 0.264706 0.262295 0.781818 0.333333 0.265812 0.000000 0.0
43810 0.008048 0.264706 0.229508 0.781818 0.333333 0.243664 0.000000 0.0 0.008048 0.264706 ... 0.000000 0.0 0.007042 0.264706 0.278689 0.781818 0.333333 0.278031 0.000000 0.0
43811 0.008048 0.264706 0.229508 0.781818 0.333333 0.255883 0.000000 0.0 0.008048 0.264706 ... 0.000000 0.0 0.012072 0.264706 0.278689 0.781818 0.333333 0.290250 0.000000 0.0
43812 0.008048 0.264706 0.262295 0.781818 0.333333 0.265812 0.000000 0.0 0.007042 0.264706 ... 0.000000 0.0 0.017103 0.264706 0.311475 0.763638 0.333333 0.302469 0.000000 0.0
43813 0.007042 0.264706 0.278689 0.781818 0.333333 0.278031 0.000000 0.0 0.012072 0.264706 ... 0.000000 0.0 0.011066 0.191177 0.311475 0.745455 0.333333 0.317748 0.000000 0.0
43814 0.012072 0.264706 0.278689 0.781818 0.333333 0.290250 0.000000 0.0 0.017103 0.264706 ... 0.000000 0.0 0.009054 0.191177 0.327869 0.745455 0.333333 0.334547 0.000000 0.0
43815 0.017103 0.264706 0.311475 0.763638 0.333333 0.302469 0.000000 0.0 0.011066 0.191177 ... 0.000000 0.0 0.011066 0.205882 0.327869 0.745455 0.333333 0.349825 0.000000 0.0
43816 0.011066 0.191177 0.311475 0.745455 0.333333 0.317748 0.000000 0.0 0.009054 0.191177 ... 0.000000 0.0 0.008048 0.250000 0.311475 0.745455 0.333333 0.365103 0.000000 0.0
43817 0.009054 0.191177 0.327869 0.745455 0.333333 0.334547 0.000000 0.0 0.011066 0.205882 ... 0.000000 0.0 0.009054 0.264706 0.295082 0.763638 0.333333 0.377322 0.000000 0.0
43818 0.011066 0.205882 0.327869 0.745455 0.333333 0.349825 0.000000 0.0 0.008048 0.250000 ... 0.000000 0.0 0.010060 0.264706 0.278689 0.763638 0.333333 0.385730 0.000000 0.0
43819 0.008048 0.250000 0.311475 0.745455 0.333333 0.365103 0.000000 0.0 0.009054 0.264706 ... 0.000000 0.0 0.008048 0.250000 0.278689 0.781818 0.333333 0.395659 0.000000 0.0
43820 0.009054 0.264706 0.295082 0.763638 0.333333 0.377322 0.000000 0.0 0.010060 0.264706 ... 0.000000 0.0 0.010060 0.264706 0.262295 0.781818 0.333333 0.405588 0.000000 0.0
43821 0.010060 0.264706 0.278689 0.763638 0.333333 0.385730 0.000000 0.0 0.008048 0.250000 ... 0.000000 0.0 0.010060 0.264706 0.262295 0.781818 0.333333 0.413996 0.000000 0.0
43822 0.008048 0.250000 0.278689 0.781818 0.333333 0.395659 0.000000 0.0 0.010060 0.264706 ... 0.000000 0.0 0.008048 0.264706 0.245902 0.781818 0.333333 0.420866 0.000000 0.0
43823 0.010060 0.264706 0.262295 0.781818 0.333333 0.405588 0.000000 0.0 0.010060 0.264706 ... 0.000000 0.0 0.012072 0.279412 0.262295 0.781818 0.333333 0.426216 0.000000 0.0

43821 rows × 32 columns
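
To see exactly how series_to_supervised lays out its columns, it is easiest to run it on a tiny made-up array: with n_in=2 and n_out=1, each surviving row holds every variable at t-2 and t-1 followed by every variable at t, and the first n_in rows are dropped because their lags are NaN.

# toy example: 2 variables, 5 time steps, 2 lag steps as input, 1 step as output
import numpy as np
toy = np.arange(10).reshape(5, 2)              # rows are time steps, columns are variables
print(series_to_supervised(toy, n_in=2, n_out=1))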


In [24]:
# split into train and test sets
values = reframed.values
n_train_hours = 365*24*3  # raw_data covers 4 years of hourly records; use the first 3 years for training
train = values[:n_train_hours, :]
test = values[n_train_hours:, :]

feature_num = raw_data.shape[1]  # number of features

# split into inputs and target
## inputs: all features over the previous forward_steps hours; target: scaled pollution at time t, i.e. var1(t)
train_X, train_y = train[:, :feature_num*forward_steps], train[:, -feature_num]
test_X, test_y = test[:, :feature_num*forward_steps], test[:, -feature_num]

# reshape input to be 3D [samples, timesteps, features]
train_X = train_X.reshape((train_X.shape[0], forward_steps, feature_num))  # forward_steps timesteps instead of 1
test_X = test_X.reshape((test_X.shape[0], forward_steps, feature_num))

print(train_X.shape, train_y.shape, test_X.shape, test_y.shape)


((26280, 3, 8), (26280,), (17541, 3, 8), (17541,))
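
The slicing above is positional: the first feature_num*forward_steps = 24 columns are the lagged inputs, and column -feature_num is var1(t), the scaled pollution at time t. Because reframed keeps descriptive column names, the same selection can be double-checked by name:

# sanity check of the positional slicing using the named columns of reframed
input_cols = [c for c in reframed.columns if '(t-' in c]   # the 24 lagged input columns
assert len(input_cols) == feature_num * forward_steps
assert reframed.columns[-feature_num] == 'var1(t)'         # the target column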

In [25]:
# LSTM
model = Sequential()
model.add(LSTM(50, input_shape=(train_X.shape[1], train_X.shape[2])))
model.add(Dense(1))
model.compile(loss='mae', optimizer='adam')  # using Mean Absolute Error here

# fit network
history = model.fit(train_X, train_y, epochs=50, batch_size=72, validation_data=(test_X, test_y), 
                    verbose=2, shuffle=False)


Train on 26280 samples, validate on 17541 samples
Epoch 1/50
6s - loss: 0.0340 - val_loss: 0.0266
Epoch 2/50
6s - loss: 0.0187 - val_loss: 0.0208
Epoch 3/50
6s - loss: 0.0177 - val_loss: 0.0188
Epoch 4/50
6s - loss: 0.0160 - val_loss: 0.0180
Epoch 5/50
6s - loss: 0.0145 - val_loss: 0.0146
Epoch 6/50
6s - loss: 0.0138 - val_loss: 0.0153
Epoch 7/50
6s - loss: 0.0133 - val_loss: 0.0155
Epoch 8/50
6s - loss: 0.0133 - val_loss: 0.0145
Epoch 9/50
6s - loss: 0.0132 - val_loss: 0.0149
Epoch 10/50
6s - loss: 0.0131 - val_loss: 0.0147
Epoch 11/50
6s - loss: 0.0131 - val_loss: 0.0152
Epoch 12/50
6s - loss: 0.0130 - val_loss: 0.0146
Epoch 13/50
6s - loss: 0.0131 - val_loss: 0.0149
Epoch 14/50
6s - loss: 0.0133 - val_loss: 0.0144
Epoch 15/50
6s - loss: 0.0130 - val_loss: 0.0148
Epoch 16/50
6s - loss: 0.0131 - val_loss: 0.0140
Epoch 17/50
6s - loss: 0.0131 - val_loss: 0.0138
Epoch 18/50
6s - loss: 0.0132 - val_loss: 0.0143
Epoch 19/50
6s - loss: 0.0131 - val_loss: 0.0142
Epoch 20/50
6s - loss: 0.0130 - val_loss: 0.0143
Epoch 21/50
6s - loss: 0.0131 - val_loss: 0.0145
Epoch 22/50
6s - loss: 0.0130 - val_loss: 0.0140
Epoch 23/50
6s - loss: 0.0130 - val_loss: 0.0140
Epoch 24/50
6s - loss: 0.0130 - val_loss: 0.0142
Epoch 25/50
6s - loss: 0.0130 - val_loss: 0.0140
Epoch 26/50
6s - loss: 0.0130 - val_loss: 0.0140
Epoch 27/50
6s - loss: 0.0130 - val_loss: 0.0135
Epoch 28/50
6s - loss: 0.0129 - val_loss: 0.0136
Epoch 29/50
6s - loss: 0.0129 - val_loss: 0.0136
Epoch 30/50
6s - loss: 0.0129 - val_loss: 0.0136
Epoch 31/50
6s - loss: 0.0129 - val_loss: 0.0138
Epoch 32/50
6s - loss: 0.0128 - val_loss: 0.0140
Epoch 33/50
6s - loss: 0.0129 - val_loss: 0.0136
Epoch 34/50
6s - loss: 0.0129 - val_loss: 0.0134
Epoch 35/50
6s - loss: 0.0129 - val_loss: 0.0137
Epoch 36/50
7s - loss: 0.0129 - val_loss: 0.0134
Epoch 37/50
7s - loss: 0.0128 - val_loss: 0.0136
Epoch 38/50
6s - loss: 0.0128 - val_loss: 0.0136
Epoch 39/50
6s - loss: 0.0128 - val_loss: 0.0136
Epoch 40/50
6s - loss: 0.0128 - val_loss: 0.0136
Epoch 41/50
6s - loss: 0.0128 - val_loss: 0.0140
Epoch 42/50
6s - loss: 0.0128 - val_loss: 0.0138
Epoch 43/50
6s - loss: 0.0128 - val_loss: 0.0139
Epoch 44/50
6s - loss: 0.0128 - val_loss: 0.0137
Epoch 45/50
6s - loss: 0.0128 - val_loss: 0.0139
Epoch 46/50
6s - loss: 0.0128 - val_loss: 0.0139
Epoch 47/50
7s - loss: 0.0128 - val_loss: 0.0143
Epoch 48/50
6s - loss: 0.0128 - val_loss: 0.0137
Epoch 49/50
6s - loss: 0.0128 - val_loss: 0.0139
Epoch 50/50
6s - loss: 0.0128 - val_loss: 0.0139
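
Validation loss stops improving after roughly 30 epochs, so the remaining epochs mostly add training time. An EarlyStopping callback is the usual way to cut training off automatically; a sketch of how it could be wired in (the patience value is an assumption, and note that the test set is also doing duty as the validation set here):

from keras.callbacks import EarlyStopping

# stop once val_loss has not improved for 5 consecutive epochs
early_stop = EarlyStopping(monitor='val_loss', patience=5)
history = model.fit(train_X, train_y, epochs=50, batch_size=72,
                    validation_data=(test_X, test_y),
                    verbose=2, shuffle=False, callbacks=[early_stop])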

In [26]:
# plot history
pyplot.plot(history.history['loss'], label='train')
pyplot.plot(history.history['val_loss'], label='test')
pyplot.legend()
pyplot.show()



In [27]:
# make a prediction
yhat = model.predict(test_X)
test_X = test_X.reshape((test_X.shape[0], feature_num*forward_steps))  # flatten back to 2D for inverse scaling

# invert scaling for the forecast
## pad yhat with the remaining feature columns so the 8-column scaler can invert it, then keep column 0
inv_yhat = concatenate((yhat, test_X[:, -(feature_num-1):]), axis=1)
inv_yhat = scaler.inverse_transform(inv_yhat)
inv_yhat = inv_yhat[:, 0]

# invert scaling for the actual values
test_y = test_y.reshape((len(test_y), 1))
inv_y = concatenate((test_y, test_X[:, -(feature_num-1):]), axis=1)
inv_y = scaler.inverse_transform(inv_y)
inv_y = inv_y[:, 0]

# calculate RMSE
rmse = sqrt(mean_squared_error(inv_y, inv_yhat))
print('Test RMSE: %.3f' % rmse)


Test RMSE: 26.026
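
An RMSE of roughly 26 µg/m³ is easier to judge against a naive baseline. A common reference is the persistence forecast, which simply predicts the last observed pollution value; a minimal sketch computed on the same inverted test targets (variable names are just for this sketch):

# persistence baseline: predict pollution(t) with pollution(t-1)
persist_pred = inv_y[:-1]
persist_true = inv_y[1:]
print('Persistence RMSE: %.3f' % sqrt(mean_squared_error(persist_true, persist_pred)))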