In [28]:
%matplotlib inline

import numpy as np
import pandas
import matplotlib.pyplot as plt
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import Dropout

In [18]:
dataframe = pandas.read_csv('international-airline-passengers.csv',
                          usecols=[1], engine='python')
type(dataframe)


Out[18]:
pandas.core.frame.DataFrame
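usecols=[1] keeps only the passenger-count column of the classic monthly international airline passengers series (the month labels are dropped), so the DataFrame holds a single numeric column.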

In [19]:
np.random.seed(7)
dataset = dataframe.values.astype(np.float32)
type(dataset)


Out[19]:
numpy.ndarray

In [3]:
plt.plot(dataset)
plt.show()
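The series is used at its raw scale here (values in the hundreds), which is why the training losses further down start out so large. A common refinement, sketched below on the assumption that scikit-learn is available, is to rescale the values to [0, 1] before windowing and to invert the transform when reading off predictions:

from sklearn.preprocessing import MinMaxScaler

scaler = MinMaxScaler(feature_range=(0, 1))
scaled_dataset = scaler.fit_transform(dataset)   # shape (n, 1), values in [0, 1]
# train on scaled_dataset instead of dataset, then map predictions back:
# predictions = scaler.inverse_transform(model.predict(X))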

In [77]:
def generate_data(dataset, look_back=1, test_split=0.25):
    """Build (window of `look_back` past values -> next value) pairs and
    split them chronologically into train and test sets."""
    x = []
    y = []
    for i in range(look_back, len(dataset) - 1):
        x.append(dataset[i - look_back:i, 0])
        y.append(dataset[i, 0])
    split_index = int(len(x) * (1. - test_split))
    return ((np.array(x[:split_index]), np.array(y[:split_index])),
            (np.array(x[split_index:]), np.array(y[split_index:])))

In [174]:
look_back = 4
(X_train, Y_train), (X_test, Y_test) = generate_data(dataset, look_back)
for i in range(0, 5):
    print("%s -> %d" % (X_train[i], Y_train[i]))

print("Train shape: %s -> %s" % (X_train.shape, Y_train.shape))
print("Test shape: %s -> %s" % (X_test.shape, Y_test.shape))


[ 112.  118.  132.  129.] -> 121
[ 118.  132.  129.  121.] -> 135
[ 132.  129.  121.  135.] -> 148
[ 129.  121.  135.  148.] -> 148
[ 121.  135.  148.  148.] -> 136
Train shape: (104, 4) -> (104,)
Test shape: (35, 4) -> (35,)
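With 144 monthly observations and look_back=4, the loop produces 139 window/target pairs; the 75/25 split then yields the 104 training and 35 test samples shown above.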

In [175]:
model = Sequential()
model.add(Dense(8, input_dim=look_back, activation='relu'))
model.add(Dense(1))
model.compile(loss='mean_squared_error', optimizer='adam')
print(model.summary())


____________________________________________________________________________________________________
Layer (type)                     Output Shape          Param #     Connected to                     
====================================================================================================
dense_38 (Dense)                 (None, 8)             40          dense_input_15[0][0]             
____________________________________________________________________________________________________
dense_39 (Dense)                 (None, 1)             9           dense_38[0][0]                   
====================================================================================================
Total params: 49
Trainable params: 49
Non-trainable params: 0
____________________________________________________________________________________________________
None
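The parameter counts follow from the layer sizes: the hidden layer has 4 × 8 weights plus 8 biases (40), and the output layer has 8 weights plus 1 bias (9), giving 49 trainable parameters in total.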

In [176]:
model.fit(X_train, Y_train, nb_epoch=200, batch_size=2, verbose=1)


Epoch 1/200
104/104 [==============================] - 0s - loss: 46799.8025
Epoch 2/200
104/104 [==============================] - 0s - loss: 22859.5337
Epoch 3/200
104/104 [==============================] - 0s - loss: 8347.4554
Epoch 4/200
104/104 [==============================] - 0s - loss: 2728.6176
Epoch 5/200
104/104 [==============================] - 0s - loss: 1718.5673
... (epochs 6-197 omitted; the loss decreases steadily to about 860) ...
Epoch 198/200
104/104 [==============================] - 0s - loss: 861.8030
Epoch 199/200
104/104 [==============================] - 0s - loss: 848.4334
Epoch 200/200
104/104 [==============================] - 0s - loss: 837.2601
Out[176]:
<keras.callbacks.History at 0x7f6edc068b10>
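As an aside, fit returns the History object shown above; assigning the call to a variable makes it easy to plot the loss curve instead of reading the per-epoch log (a minimal sketch):

history = model.fit(X_train, Y_train, nb_epoch=200, batch_size=2, verbose=0)
plt.plot(history.history['loss'])
plt.xlabel('epoch')
plt.ylabel('training loss (MSE)')
plt.show()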

In [172]:
score = model.evaluate(X_test, Y_test, verbose=0)
print("Score: %1.4f" % score)


Score: 2517.1370

A test MSE of about 2517 corresponds to a root mean squared error of roughly 50 in the original units, so the predictions track the actual passenger counts reasonably closely.
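For a quick error figure in the original units, the square root of the MSE can be printed directly:

print("RMSE: %.1f" % np.sqrt(score))   # about 50.2 for the score above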


In [173]:
trainedPrediction = model.predict(X_train)
testedPrediction = model.predict(X_test)
plt.plot(dataset)
plt.plot(trainedPrediction)
# Pad with NaNs so the test predictions are plotted after the training portion;
# np.zeros_like(X_train) has the wrong shape and would also draw a flat line at zero.
padding = np.full_like(trainedPrediction, np.nan)
plt.plot(np.concatenate((padding, testedPrediction)))
plt.show()


