In [1]:
import time

import numpy as np
import pandas as pd

from keras.layers.core import Dense, Activation, Dropout
from keras.layers.recurrent import LSTM
from keras.models import Sequential
# mean_squared_error was imported twice in the original; import each name once.
from sklearn.metrics import mean_absolute_error, mean_squared_error
from sklearn.preprocessing import MinMaxScaler
from sklearn.preprocessing import StandardScaler
from sklearn.utils import shuffle

# Candidate training-epoch counts explored in this notebook (one run per value).
epochs = [30, 50, 100, 200]


Using TensorFlow backend.

In [2]:
epoch = 200
# Load data
train = pd.read_csv('trips_train_3.csv', header=None)
test = pd.read_csv('trips_test_2.csv', header=None)

# Scale everything into [-1, 1]. Fit on the TRAINING data only, so the test
# set is scaled with the same parameters; re-fitting on the test set (as the
# original did) leaks test statistics and puts train/test on different scales.
scaler = MinMaxScaler(feature_range=(-1, 1))
window_size = 78  # 78 steps in one day

# Normalize the training series and build a sliding-window design matrix:
# column 0 is the value at t, column k is the value at t+k (k = 1..window_size).
scaled = scaler.fit_transform(train.values)
train = pd.DataFrame(scaled)

series_s = train.copy()
for i in range(window_size):
    train = pd.concat([train, series_s.shift(-(i+1))], axis=1)

train.dropna(axis=0, inplace=True)  # drop trailing rows with incomplete windows

# Do the same for the test data
test = test.iloc[:24624, :]  # The rest are all 0s
scaled = scaler.transform(test.values)  # transform only: reuse the train-fitted scaling
test = pd.DataFrame(scaled)

series_s = test.copy()
for i in range(window_size):
    test = pd.concat([test, series_s.shift(-(i+1))], axis=1)

test.dropna(axis=0, inplace=True)

# Shuffle the training rows (each window is an independent sample once built),
# then split features (first window_size columns) from the target (last column).
train = shuffle(train)
train_X = train.iloc[:, :-1]
train_y = train.iloc[:, -1]
test_X = test.iloc[:, :-1]
test_y = test.iloc[:, -1]
train_X = train_X.values
train_y = train_y.values
test_X = test_X.values
test_y = test_y.values
# Keras LSTMs expect 3-D input: (samples, timesteps, features).
train_X = train_X.reshape(train_X.shape[0], train_X.shape[1], 1)
test_X = test_X.reshape(test_X.shape[0], test_X.shape[1], 1)

# Define the LSTM model: stacked LSTMs (78 -> 256) with heavy dropout,
# ending in a single linear unit for one-step-ahead regression.
model = Sequential()
# Keras 2 API (`units=`): the old `output_dim=` form only triggers a
# deprecation warning (see the UserWarning in the captured output).
model.add(LSTM(units=window_size, input_shape=(window_size, 1), return_sequences=True))
model.add(Dropout(0.5))
model.add(LSTM(256))
model.add(Dropout(0.5))
model.add(Dense(1))
model.add(Activation("linear"))
model.compile(loss="mse", optimizer="adam")
model.summary()

# Train, holding out the last 10% of the (shuffled) windows for validation.
start = time.time()
model.fit(train_X, train_y, batch_size=100, epochs=epoch, validation_split=0.1)
# The elapsed time measured here is the fit() call, not compilation.
print("> Training Time : ", time.time() - start)

def moving_test_window_preds(n_future_preds):
    """Roll the model forward over the test set, feeding predictions back in.

    n_future_preds -- number of one-step predictions to make, i.e. how many
    windows we slide forward on the test data.

    Returns the predictions rescaled back to original units as an
    (n_future_preds, 1) array.
    """
    # Seed window: the first test sample, shaped (1, window_size, 1).
    window = np.array([test_X[0, :].tolist()])
    preds_moving = []

    for _ in range(n_future_preds):
        # The model's output is already in scaled space, so it can be fed
        # straight back into the window without rescaling.
        step = model.predict(window)
        preds_moving.append(step[0, 0])
        # Slide: drop the oldest timestep, append the new prediction at the end.
        window = np.concatenate((window[:, 1:, :], step.reshape(1, 1, 1)), axis=1)

    # Undo the MinMax scaling so predictions are comparable with the raw series.
    return scaler.inverse_transform(np.array(preds_moving).reshape(-1, 1))

# Generate 500 rolled-forward predictions and bring the targets back to raw units.
preds_moving = moving_test_window_preds(500)
actuals = scaler.inverse_transform(test_y.reshape(-1, 1))

# Score only steps 74..149, the stretch of interest within the forecast horizon.
eval_window = slice(74, 150)
mse = mean_squared_error(actuals[eval_window], preds_moving[eval_window])
mae = mean_absolute_error(actuals[eval_window], preds_moving[eval_window])

# Persist the forecast; the file name encodes the epoch count and both scores.
with open('f_%s_%s_%s.txt' % (epoch, mse, mae), 'w') as f:
    f.writelines("%s\n" % p for p in preds_moving)


c:\users\kuno\miniconda3\lib\site-packages\sklearn\utils\validation.py:429: DataConversionWarning: Data with input dtype int64 was converted to float64 by MinMaxScaler.
  warnings.warn(msg, _DataConversionWarning)
c:\users\kuno\miniconda3\lib\site-packages\ipykernel_launcher.py:40: UserWarning: Update your `LSTM` call to the Keras 2 API: `LSTM(return_sequences=True, input_shape=(78, 1), units=78)`
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
lstm_1 (LSTM)                (None, 78, 78)            24960     
_________________________________________________________________
dropout_1 (Dropout)          (None, 78, 78)            0         
_________________________________________________________________
lstm_2 (LSTM)                (None, 256)               343040    
_________________________________________________________________
dropout_2 (Dropout)          (None, 256)               0         
_________________________________________________________________
dense_1 (Dense)              (None, 1)                 257       
_________________________________________________________________
activation_1 (Activation)    (None, 1)                 0         
=================================================================
Total params: 368,257
Trainable params: 368,257
Non-trainable params: 0
_________________________________________________________________
Train on 35634 samples, validate on 3960 samples
Epoch 1/200
35634/35634 [==============================] - 300s 8ms/step - loss: 0.0300 - val_loss: 0.0140
Epoch 2/200
35634/35634 [==============================] - 315s 9ms/step - loss: 0.0176 - val_loss: 0.0134
Epoch 3/200
35634/35634 [==============================] - 326s 9ms/step - loss: 0.0152 - val_loss: 0.0109
Epoch 4/200
35634/35634 [==============================] - 328s 9ms/step - loss: 0.0143 - val_loss: 0.0102
Epoch 5/200
35634/35634 [==============================] - 333s 9ms/step - loss: 0.0136 - val_loss: 0.0104
Epoch 6/200
35634/35634 [==============================] - 333s 9ms/step - loss: 0.0129 - val_loss: 0.0092
Epoch 7/200
35634/35634 [==============================] - 336s 9ms/step - loss: 0.0123 - val_loss: 0.0098
Epoch 8/200
35634/35634 [==============================] - 343s 10ms/step - loss: 0.0117 - val_loss: 0.0088
Epoch 9/200
35634/35634 [==============================] - 348s 10ms/step - loss: 0.0111 - val_loss: 0.0096
Epoch 10/200
35634/35634 [==============================] - 345s 10ms/step - loss: 0.0109 - val_loss: 0.0079
Epoch 11/200
35634/35634 [==============================] - 352s 10ms/step - loss: 0.0103 - val_loss: 0.0074
Epoch 12/200
35634/35634 [==============================] - 358s 10ms/step - loss: 0.0098 - val_loss: 0.0071
Epoch 13/200
35634/35634 [==============================] - 357s 10ms/step - loss: 0.0093 - val_loss: 0.0066
Epoch 14/200
35634/35634 [==============================] - 358s 10ms/step - loss: 0.0085 - val_loss: 0.0060
Epoch 15/200
35634/35634 [==============================] - 358s 10ms/step - loss: 0.0083 - val_loss: 0.0064
Epoch 16/200
35634/35634 [==============================] - 359s 10ms/step - loss: 0.0078 - val_loss: 0.0057
Epoch 17/200
35634/35634 [==============================] - 358s 10ms/step - loss: 0.0078 - val_loss: 0.0053
Epoch 18/200
35634/35634 [==============================] - 361s 10ms/step - loss: 0.0074 - val_loss: 0.0057
Epoch 19/200
35634/35634 [==============================] - 360s 10ms/step - loss: 0.0073 - val_loss: 0.0054
Epoch 20/200
35634/35634 [==============================] - 357s 10ms/step - loss: 0.0071 - val_loss: 0.0050
Epoch 21/200
35634/35634 [==============================] - 359s 10ms/step - loss: 0.0074 - val_loss: 0.0053
Epoch 22/200
35634/35634 [==============================] - 361s 10ms/step - loss: 0.0071 - val_loss: 0.0050
Epoch 23/200
35634/35634 [==============================] - 360s 10ms/step - loss: 0.0069 - val_loss: 0.0047
Epoch 24/200
35634/35634 [==============================] - 357s 10ms/step - loss: 0.0068 - val_loss: 0.0052
Epoch 25/200
35634/35634 [==============================] - 358s 10ms/step - loss: 0.0067 - val_loss: 0.0047
Epoch 26/200
35634/35634 [==============================] - 357s 10ms/step - loss: 0.0067 - val_loss: 0.0048
Epoch 27/200
35634/35634 [==============================] - 357s 10ms/step - loss: 0.0068 - val_loss: 0.0048
Epoch 28/200
35634/35634 [==============================] - 358s 10ms/step - loss: 0.0066 - val_loss: 0.0048
Epoch 29/200
35634/35634 [==============================] - 360s 10ms/step - loss: 0.0065 - val_loss: 0.0047
Epoch 30/200
35634/35634 [==============================] - 355s 10ms/step - loss: 0.0065 - val_loss: 0.0045
Epoch 31/200
35634/35634 [==============================] - 355s 10ms/step - loss: 0.0065 - val_loss: 0.0046
Epoch 32/200
35634/35634 [==============================] - 355s 10ms/step - loss: 0.0064 - val_loss: 0.0048
Epoch 33/200
35634/35634 [==============================] - 356s 10ms/step - loss: 0.0064 - val_loss: 0.0045
Epoch 34/200
35634/35634 [==============================] - 354s 10ms/step - loss: 0.0064 - val_loss: 0.0049
Epoch 35/200
35634/35634 [==============================] - 356s 10ms/step - loss: 0.0063 - val_loss: 0.0046
Epoch 36/200
35634/35634 [==============================] - 355s 10ms/step - loss: 0.0062 - val_loss: 0.0045
Epoch 37/200
35634/35634 [==============================] - 354s 10ms/step - loss: 0.0062 - val_loss: 0.0044
Epoch 38/200
35634/35634 [==============================] - 352s 10ms/step - loss: 0.0061 - val_loss: 0.0044
Epoch 39/200
35634/35634 [==============================] - 358s 10ms/step - loss: 0.0062 - val_loss: 0.0044
Epoch 40/200
35634/35634 [==============================] - 353s 10ms/step - loss: 0.0063 - val_loss: 0.0044
Epoch 41/200
35634/35634 [==============================] - 351s 10ms/step - loss: 0.0062 - val_loss: 0.0045
Epoch 42/200
35634/35634 [==============================] - 353s 10ms/step - loss: 0.0060 - val_loss: 0.0045
Epoch 43/200
35634/35634 [==============================] - 353s 10ms/step - loss: 0.0061 - val_loss: 0.0043
Epoch 44/200
35634/35634 [==============================] - 353s 10ms/step - loss: 0.0061 - val_loss: 0.0043
Epoch 45/200
35634/35634 [==============================] - 354s 10ms/step - loss: 0.0060 - val_loss: 0.0044
Epoch 46/200
35634/35634 [==============================] - 351s 10ms/step - loss: 0.0060 - val_loss: 0.0047
Epoch 47/200
35634/35634 [==============================] - 347s 10ms/step - loss: 0.0059 - val_loss: 0.0050
Epoch 48/200
35634/35634 [==============================] - 352s 10ms/step - loss: 0.0061 - val_loss: 0.0043
Epoch 49/200
35634/35634 [==============================] - 357s 10ms/step - loss: 0.0059 - val_loss: 0.0044
Epoch 50/200
35634/35634 [==============================] - 356s 10ms/step - loss: 0.0059 - val_loss: 0.0044
Epoch 51/200
35634/35634 [==============================] - 352s 10ms/step - loss: 0.0063 - val_loss: 0.0051
Epoch 52/200
35634/35634 [==============================] - 352s 10ms/step - loss: 0.0063 - val_loss: 0.0048
Epoch 53/200
35634/35634 [==============================] - 352s 10ms/step - loss: 0.0060 - val_loss: 0.0043
Epoch 54/200
35634/35634 [==============================] - 351s 10ms/step - loss: 0.0058 - val_loss: 0.0042
Epoch 55/200
35634/35634 [==============================] - 355s 10ms/step - loss: 0.0058 - val_loss: 0.0043
Epoch 56/200
35634/35634 [==============================] - 352s 10ms/step - loss: 0.0057 - val_loss: 0.0044
Epoch 57/200
35634/35634 [==============================] - 350s 10ms/step - loss: 0.0056 - val_loss: 0.0042
Epoch 58/200
35634/35634 [==============================] - 353s 10ms/step - loss: 0.0057 - val_loss: 0.0045
Epoch 59/200
35634/35634 [==============================] - 347s 10ms/step - loss: 0.0057 - val_loss: 0.0042
Epoch 60/200
35634/35634 [==============================] - 356s 10ms/step - loss: 0.0057 - val_loss: 0.0045
Epoch 61/200
35634/35634 [==============================] - 347s 10ms/step - loss: 0.0057 - val_loss: 0.0042
Epoch 62/200
35634/35634 [==============================] - 345s 10ms/step - loss: 0.0058 - val_loss: 0.0045
Epoch 63/200
35634/35634 [==============================] - 346s 10ms/step - loss: 0.0057 - val_loss: 0.0042
Epoch 64/200
35634/35634 [==============================] - 345s 10ms/step - loss: 0.0056 - val_loss: 0.0042
Epoch 65/200
35634/35634 [==============================] - 341s 10ms/step - loss: 0.0057 - val_loss: 0.0048
Epoch 66/200
35634/35634 [==============================] - 343s 10ms/step - loss: 0.0057 - val_loss: 0.0043
Epoch 67/200
35634/35634 [==============================] - 345s 10ms/step - loss: 0.0057 - val_loss: 0.0041
Epoch 68/200
35634/35634 [==============================] - 345s 10ms/step - loss: 0.0070 - val_loss: 0.0045
Epoch 69/200
35634/35634 [==============================] - 345s 10ms/step - loss: 0.0057 - val_loss: 0.0041
Epoch 70/200
35634/35634 [==============================] - 348s 10ms/step - loss: 0.0057 - val_loss: 0.0043
Epoch 71/200
35634/35634 [==============================] - 343s 10ms/step - loss: 0.0055 - val_loss: 0.0043
Epoch 72/200
35634/35634 [==============================] - 341s 10ms/step - loss: 0.0058 - val_loss: 0.0043
Epoch 73/200
35634/35634 [==============================] - 339s 10ms/step - loss: 0.0057 - val_loss: 0.0046
Epoch 74/200
35634/35634 [==============================] - 338s 9ms/step - loss: 0.0055 - val_loss: 0.0043
Epoch 75/200
35634/35634 [==============================] - 335s 9ms/step - loss: 0.0055 - val_loss: 0.0042
Epoch 76/200
35634/35634 [==============================] - 342s 10ms/step - loss: 0.0055 - val_loss: 0.0042
Epoch 77/200
35634/35634 [==============================] - 337s 9ms/step - loss: 0.0055 - val_loss: 0.0043
Epoch 78/200
35634/35634 [==============================] - 337s 9ms/step - loss: 0.0055 - val_loss: 0.0043
Epoch 79/200
35634/35634 [==============================] - 339s 10ms/step - loss: 0.0055 - val_loss: 0.0043
Epoch 80/200
35634/35634 [==============================] - 338s 9ms/step - loss: 0.0056 - val_loss: 0.0047
Epoch 81/200
35634/35634 [==============================] - 341s 10ms/step - loss: 0.0056 - val_loss: 0.0042
Epoch 82/200
35634/35634 [==============================] - 336s 9ms/step - loss: 0.0055 - val_loss: 0.0042
Epoch 83/200
35634/35634 [==============================] - 338s 9ms/step - loss: 0.0054 - val_loss: 0.0043
Epoch 84/200
35634/35634 [==============================] - 337s 9ms/step - loss: 0.0054 - val_loss: 0.0042
Epoch 85/200
35634/35634 [==============================] - 335s 9ms/step - loss: 0.0054 - val_loss: 0.0043
Epoch 86/200
35634/35634 [==============================] - 338s 9ms/step - loss: 0.0055 - val_loss: 0.0042
Epoch 87/200
35634/35634 [==============================] - 336s 9ms/step - loss: 0.0055 - val_loss: 0.0043
Epoch 88/200
35634/35634 [==============================] - 334s 9ms/step - loss: 0.0054 - val_loss: 0.0041
Epoch 89/200
35634/35634 [==============================] - 336s 9ms/step - loss: 0.0054 - val_loss: 0.0042
Epoch 90/200
35634/35634 [==============================] - 333s 9ms/step - loss: 0.0053 - val_loss: 0.0041
Epoch 91/200
35634/35634 [==============================] - 337s 9ms/step - loss: 0.0053 - val_loss: 0.0044
Epoch 92/200
35634/35634 [==============================] - 340s 10ms/step - loss: 0.0054 - val_loss: 0.0042
Epoch 93/200
35634/35634 [==============================] - 337s 9ms/step - loss: 0.0053 - val_loss: 0.0043
Epoch 94/200
35634/35634 [==============================] - 337s 9ms/step - loss: 0.0054 - val_loss: 0.0044
Epoch 95/200
35634/35634 [==============================] - 334s 9ms/step - loss: 0.0053 - val_loss: 0.0043
Epoch 96/200
35634/35634 [==============================] - 332s 9ms/step - loss: 0.0054 - val_loss: 0.0041
Epoch 97/200
35634/35634 [==============================] - 335s 9ms/step - loss: 0.0054 - val_loss: 0.0045
Epoch 98/200
35634/35634 [==============================] - 333s 9ms/step - loss: 0.0053 - val_loss: 0.0042
Epoch 99/200
35634/35634 [==============================] - 333s 9ms/step - loss: 0.0053 - val_loss: 0.0042
Epoch 100/200
35634/35634 [==============================] - 336s 9ms/step - loss: 0.0054 - val_loss: 0.0044
Epoch 101/200
35634/35634 [==============================] - 334s 9ms/step - loss: 0.0053 - val_loss: 0.0043
Epoch 102/200
35634/35634 [==============================] - 341s 10ms/step - loss: 0.0053 - val_loss: 0.0043
Epoch 103/200
35634/35634 [==============================] - 341s 10ms/step - loss: 0.0052 - val_loss: 0.0046
Epoch 104/200
35634/35634 [==============================] - 344s 10ms/step - loss: 0.0052 - val_loss: 0.0042
Epoch 105/200
35634/35634 [==============================] - 343s 10ms/step - loss: 0.0052 - val_loss: 0.0043
Epoch 106/200
35634/35634 [==============================] - 347s 10ms/step - loss: 0.0054 - val_loss: 0.0042
Epoch 107/200
35634/35634 [==============================] - 349s 10ms/step - loss: 0.0053 - val_loss: 0.0041
Epoch 108/200
35634/35634 [==============================] - 354s 10ms/step - loss: 0.0052 - val_loss: 0.0042
Epoch 109/200
35634/35634 [==============================] - 358s 10ms/step - loss: 0.0051 - val_loss: 0.0043
Epoch 110/200
35634/35634 [==============================] - 358s 10ms/step - loss: 0.0052 - val_loss: 0.0041
Epoch 111/200
35634/35634 [==============================] - 361s 10ms/step - loss: 0.0052 - val_loss: 0.0043
Epoch 112/200
35634/35634 [==============================] - 365s 10ms/step - loss: 0.0052 - val_loss: 0.0043
Epoch 113/200
35634/35634 [==============================] - 367s 10ms/step - loss: 0.0052 - val_loss: 0.0041
Epoch 114/200
35634/35634 [==============================] - 367s 10ms/step - loss: 0.0052 - val_loss: 0.0041
Epoch 115/200
35634/35634 [==============================] - 368s 10ms/step - loss: 0.0052 - val_loss: 0.0041
Epoch 116/200
35634/35634 [==============================] - 392s 11ms/step - loss: 0.0052 - val_loss: 0.0045
Epoch 117/200
35634/35634 [==============================] - 374s 10ms/step - loss: 0.0051 - val_loss: 0.0041
Epoch 118/200
35634/35634 [==============================] - 372s 10ms/step - loss: 0.0051 - val_loss: 0.0044
Epoch 119/200
35634/35634 [==============================] - 377s 11ms/step - loss: 0.0051 - val_loss: 0.0041
Epoch 120/200
35634/35634 [==============================] - 375s 11ms/step - loss: 0.0051 - val_loss: 0.0042
Epoch 121/200
35634/35634 [==============================] - 374s 11ms/step - loss: 0.0051 - val_loss: 0.0043
Epoch 122/200
35634/35634 [==============================] - 376s 11ms/step - loss: 0.0051 - val_loss: 0.0043
Epoch 123/200
35634/35634 [==============================] - 378s 11ms/step - loss: 0.0051 - val_loss: 0.0042
Epoch 124/200
35634/35634 [==============================] - 379s 11ms/step - loss: 0.0051 - val_loss: 0.0042
Epoch 125/200
35634/35634 [==============================] - 379s 11ms/step - loss: 0.0050 - val_loss: 0.0040
Epoch 126/200
35634/35634 [==============================] - 378s 11ms/step - loss: 0.0050 - val_loss: 0.0041
Epoch 127/200
35634/35634 [==============================] - 380s 11ms/step - loss: 0.0050 - val_loss: 0.0042
Epoch 128/200
35634/35634 [==============================] - 378s 11ms/step - loss: 0.0052 - val_loss: 0.0041
Epoch 129/200
35634/35634 [==============================] - 380s 11ms/step - loss: 0.0050 - val_loss: 0.0042
Epoch 130/200
35634/35634 [==============================] - 381s 11ms/step - loss: 0.0050 - val_loss: 0.0041
Epoch 131/200
35634/35634 [==============================] - 383s 11ms/step - loss: 0.0049 - val_loss: 0.0041
Epoch 132/200
35634/35634 [==============================] - 385s 11ms/step - loss: 0.0050 - val_loss: 0.0042
Epoch 133/200
35634/35634 [==============================] - 380s 11ms/step - loss: 0.0051 - val_loss: 0.0042
Epoch 134/200
35634/35634 [==============================] - 381s 11ms/step - loss: 0.0049 - val_loss: 0.0044
Epoch 135/200
35634/35634 [==============================] - 380s 11ms/step - loss: 0.0054 - val_loss: 0.0043
Epoch 136/200
35634/35634 [==============================] - 378s 11ms/step - loss: 0.0049 - val_loss: 0.0041
Epoch 137/200
35634/35634 [==============================] - 381s 11ms/step - loss: 0.0049 - val_loss: 0.0042
Epoch 138/200
35634/35634 [==============================] - 380s 11ms/step - loss: 0.0049 - val_loss: 0.0042
Epoch 139/200
35634/35634 [==============================] - 381s 11ms/step - loss: 0.0049 - val_loss: 0.0042
Epoch 140/200
35634/35634 [==============================] - 376s 11ms/step - loss: 0.0049 - val_loss: 0.0041
Epoch 141/200
35634/35634 [==============================] - 377s 11ms/step - loss: 0.0049 - val_loss: 0.0043
Epoch 142/200
35634/35634 [==============================] - 377s 11ms/step - loss: 0.0049 - val_loss: 0.0043
Epoch 143/200
35634/35634 [==============================] - 377s 11ms/step - loss: 0.0049 - val_loss: 0.0044
Epoch 144/200
35634/35634 [==============================] - 377s 11ms/step - loss: 0.0049 - val_loss: 0.0041
Epoch 145/200
35634/35634 [==============================] - 379s 11ms/step - loss: 0.0048 - val_loss: 0.0042
Epoch 146/200
35634/35634 [==============================] - 376s 11ms/step - loss: 0.0049 - val_loss: 0.0042
Epoch 147/200
35634/35634 [==============================] - 376s 11ms/step - loss: 0.0049 - val_loss: 0.0044
Epoch 148/200
35634/35634 [==============================] - 376s 11ms/step - loss: 0.0049 - val_loss: 0.0042
Epoch 149/200
35634/35634 [==============================] - 373s 10ms/step - loss: 0.0048 - val_loss: 0.0044
Epoch 150/200
35634/35634 [==============================] - 376s 11ms/step - loss: 0.0048 - val_loss: 0.0042
Epoch 151/200
35634/35634 [==============================] - 375s 11ms/step - loss: 0.0048 - val_loss: 0.0045
Epoch 152/200
35634/35634 [==============================] - 372s 10ms/step - loss: 0.0050 - val_loss: 0.0042
Epoch 153/200
35634/35634 [==============================] - 376s 11ms/step - loss: 0.0049 - val_loss: 0.0041
Epoch 154/200
35634/35634 [==============================] - 372s 10ms/step - loss: 0.0048 - val_loss: 0.0045
Epoch 155/200
35634/35634 [==============================] - 376s 11ms/step - loss: 0.0048 - val_loss: 0.0043
Epoch 156/200
35634/35634 [==============================] - 373s 10ms/step - loss: 0.0048 - val_loss: 0.0046
Epoch 157/200
35634/35634 [==============================] - 372s 10ms/step - loss: 0.0047 - val_loss: 0.0042
Epoch 158/200
35634/35634 [==============================] - 370s 10ms/step - loss: 0.0047 - val_loss: 0.0046
Epoch 159/200
35634/35634 [==============================] - 371s 10ms/step - loss: 0.0048 - val_loss: 0.0044
Epoch 160/200
35634/35634 [==============================] - 374s 10ms/step - loss: 0.0047 - val_loss: 0.0043
Epoch 161/200
35634/35634 [==============================] - 375s 11ms/step - loss: 0.0047 - val_loss: 0.0045
Epoch 162/200
35634/35634 [==============================] - 375s 11ms/step - loss: 0.0048 - val_loss: 0.0043
Epoch 163/200
35634/35634 [==============================] - 374s 10ms/step - loss: 0.0048 - val_loss: 0.0043
Epoch 164/200
35634/35634 [==============================] - 374s 10ms/step - loss: 0.0047 - val_loss: 0.0042
Epoch 165/200
35634/35634 [==============================] - 375s 11ms/step - loss: 0.0047 - val_loss: 0.0043
Epoch 166/200
35634/35634 [==============================] - 372s 10ms/step - loss: 0.0046 - val_loss: 0.0043
Epoch 167/200
35634/35634 [==============================] - 373s 10ms/step - loss: 0.0047 - val_loss: 0.0041
Epoch 168/200
35634/35634 [==============================] - 372s 10ms/step - loss: 0.0047 - val_loss: 0.0046
Epoch 169/200
35634/35634 [==============================] - 373s 10ms/step - loss: 0.0046 - val_loss: 0.0045
Epoch 170/200
35634/35634 [==============================] - 375s 11ms/step - loss: 0.0047 - val_loss: 0.0042
Epoch 171/200
35634/35634 [==============================] - 372s 10ms/step - loss: 0.0046 - val_loss: 0.0043
Epoch 172/200
35634/35634 [==============================] - 373s 10ms/step - loss: 0.0047 - val_loss: 0.0044
Epoch 173/200
35634/35634 [==============================] - 373s 10ms/step - loss: 0.0046 - val_loss: 0.0044
Epoch 174/200
35634/35634 [==============================] - 372s 10ms/step - loss: 0.0046 - val_loss: 0.0044
Epoch 175/200
35634/35634 [==============================] - 375s 11ms/step - loss: 0.0046 - val_loss: 0.0044
Epoch 176/200
35634/35634 [==============================] - 374s 10ms/step - loss: 0.0047 - val_loss: 0.0043
Epoch 177/200
35634/35634 [==============================] - 372s 10ms/step - loss: 0.0047 - val_loss: 0.0043
Epoch 178/200
35634/35634 [==============================] - 373s 10ms/step - loss: 0.0046 - val_loss: 0.0044
Epoch 179/200
35634/35634 [==============================] - 379s 11ms/step - loss: 0.0046 - val_loss: 0.0043
Epoch 180/200
35634/35634 [==============================] - 373s 10ms/step - loss: 0.0046 - val_loss: 0.0043
Epoch 181/200
35634/35634 [==============================] - 372s 10ms/step - loss: 0.0046 - val_loss: 0.0045
Epoch 182/200
35634/35634 [==============================] - 376s 11ms/step - loss: 0.0045 - val_loss: 0.0044
Epoch 183/200
35634/35634 [==============================] - 378s 11ms/step - loss: 0.0045 - val_loss: 0.0044
Epoch 184/200
35634/35634 [==============================] - 380s 11ms/step - loss: 0.0045 - val_loss: 0.0046
Epoch 185/200
35634/35634 [==============================] - 372s 10ms/step - loss: 0.0046 - val_loss: 0.0044
Epoch 186/200
35634/35634 [==============================] - 368s 10ms/step - loss: 0.0045 - val_loss: 0.0043
Epoch 187/200
35634/35634 [==============================] - 370s 10ms/step - loss: 0.0045 - val_loss: 0.0043
Epoch 188/200
35634/35634 [==============================] - 365s 10ms/step - loss: 0.0045 - val_loss: 0.0045
Epoch 189/200
35634/35634 [==============================] - 365s 10ms/step - loss: 0.0046 - val_loss: 0.0043
Epoch 190/200
35634/35634 [==============================] - 368s 10ms/step - loss: 0.0045 - val_loss: 0.0043
Epoch 191/200
35634/35634 [==============================] - 365s 10ms/step - loss: 0.0045 - val_loss: 0.0044
Epoch 192/200
35634/35634 [==============================] - 367s 10ms/step - loss: 0.0045 - val_loss: 0.0044
Epoch 193/200
35634/35634 [==============================] - 366s 10ms/step - loss: 0.0045 - val_loss: 0.0043
Epoch 194/200
35634/35634 [==============================] - 372s 10ms/step - loss: 0.0045 - val_loss: 0.0044
Epoch 195/200
35634/35634 [==============================] - 364s 10ms/step - loss: 0.0045 - val_loss: 0.0045
Epoch 196/200
35634/35634 [==============================] - 365s 10ms/step - loss: 0.0044 - val_loss: 0.0045
Epoch 197/200
35634/35634 [==============================] - 368s 10ms/step - loss: 0.0045 - val_loss: 0.0043
Epoch 198/200
35634/35634 [==============================] - 364s 10ms/step - loss: 0.0044 - val_loss: 0.0044
Epoch 199/200
35634/35634 [==============================] - 371s 10ms/step - loss: 0.0044 - val_loss: 0.0043
Epoch 200/200
35634/35634 [==============================] - 370s 10ms/step - loss: 0.0045 - val_loss: 0.0045
> Compilation Time :  71748.16842412949

In [4]:
from matplotlib import pyplot

# Overlay the first 600 actual values against the rolled-forward forecast.
fig, ax = pyplot.subplots(figsize=(20, 6))
ax.plot(actuals[:600])
ax.plot(preds_moving[:600])
ax.set_title("200 epochs")
pyplot.show()



In [8]:
# Re-plot a previously saved 30-epoch forecast against the actuals.
with open("f_30_191360.34899787782_369.8666947640871.txt") as f:
    pyplot.figure(figsize=(20,6))
    pyplot.plot(actuals[:600])
    # Each saved line looks like "[ value]"; l[1:-2] strips the brackets and newline.
    # NOTE(review): this produces an array of *strings*, not floats — presumably
    # matplotlib coerces them here; consider float(l[1:-2]) — TODO confirm.
    pyplot.plot(np.array([l[1:-2] for l in f.readlines()]))
    pyplot.title("30 epochs")
    pyplot.show()



In [9]:
# Re-plot a previously saved 50-epoch forecast against the actuals.
with open("f_50_23261.834132086205_116.21821095441517.txt") as f:
    pyplot.figure(figsize=(20,6))
    pyplot.plot(actuals[:600])
    # Each saved line looks like "[ value]"; l[1:-2] strips the brackets and newline.
    # NOTE(review): this yields an array of strings — see the 30-epoch cell; TODO confirm.
    pyplot.plot(np.array([l[1:-2] for l in f.readlines()]))
    pyplot.title("50 epochs")
    pyplot.show()



In [11]:
# Re-plot a previously saved 100-epoch forecast against the actuals.
with open("f_100_8694.5463661338_66.40398304085983.txt") as f:
    pyplot.figure(figsize=(20,6))
    pyplot.plot(actuals[:600])
    # Each saved line looks like "[ value]"; l[1:-2] strips the brackets and newline.
    # NOTE(review): this yields an array of strings — see the 30-epoch cell; TODO confirm.
    pyplot.plot(np.array([l[1:-2] for l in f.readlines()]))
    pyplot.title("100 epochs")
    pyplot.show()



In [20]:
# Sanity check: one-step prediction from the first day's window of actuals.
# NOTE(review): fit_transform here re-FITS the global scaler on only
# actuals[:78], mutating scaler state and scaling differently from training
# — verify this is intended before relying on the output.
model.predict(np.array([scaler.fit_transform(actuals[:78])]))


Out[20]:
array([[-0.84483016]], dtype=float32)

In [91]:
# Plot two days of actuals with predictions `res` overlaid on the second day
# (zero-padding covers the first, seed, day).
# NOTE(review): `res` is never assigned at top level on a fresh run
# (predict_next returns it but its result is not stored) — this cell relies
# on out-of-order kernel state; assign res = predict_next(...) first.
pyplot.figure(figsize=(20,6))
pyplot.plot(actuals[:156])
pyplot.plot(np.concatenate(([[0] for i in range(78)], scaler.inverse_transform(res))))
pyplot.show()



In [71]:
def predict_next(info, n):
    """Autoregressively predict `n` future steps from the raw series `info`.

    info -- 2-D array of raw (unscaled) values with at least 78 rows (one day).
    n    -- number of steps to predict; expected n <= 78, since the window
            mixes raw history info[i:78] with the i predictions made so far.

    Returns a list of n scaled one-step predictions (apply
    scaler.inverse_transform to recover original units).
    """
    # Fit the scaler ONCE on the seed day. The original re-fit it on the
    # shrinking slice info[i:78] every iteration, so the raw history and the
    # fed-back predictions ended up on inconsistent scales.
    scaler.fit(info[:78])
    res = []
    for i in range(n):
        # Window = remaining raw history plus the predictions made so far
        # (always 78 timesteps total for i < 78).
        base = np.concatenate((scaler.transform(info[i:78]), res)) if res else scaler.transform(info[:78])
        pred = model.predict(np.array([base]))
        res.append(pred[0])
    return res

In [85]:
# Load the Sept 19 (earthquake day) trip counts as an (N, 1) int array,
# skipping blank lines. A context manager closes the file handle, which the
# original bare open(...) leaked.
with open("trips_19_sept.csv") as f:
    data_temblor = np.array([[int(line.strip())] for line in f if line.strip()])

In [102]:
# First two days of the earthquake-day series vs. 50 autoregressive
# predictions, zero-padded over the first day (the model's seed window).
pyplot.figure(figsize=(20,6))
pyplot.plot(data_temblor[:156])
pyplot.plot(np.concatenate(([[0] for i in range(78)], scaler.inverse_transform(predict_next(data_temblor, 50)))))
pyplot.show()


c:\users\kuno\miniconda3\lib\site-packages\sklearn\utils\validation.py:429: DataConversionWarning: Data with input dtype int32 was converted to float64 by MinMaxScaler.
  warnings.warn(msg, _DataConversionWarning)

In [117]:
# Same as above, but start the seed window `n` steps later so the 20
# predictions land right around the event.
n = 40  # offset (in series steps) at which the seed window starts
pyplot.figure(figsize=(20,6))
pyplot.plot(data_temblor[:560])
pyplot.plot(np.concatenate(([[0] for i in range(78 + n)], scaler.inverse_transform(predict_next(data_temblor[n:], 20)))))
pyplot.show()


c:\users\kuno\miniconda3\lib\site-packages\sklearn\utils\validation.py:429: DataConversionWarning: Data with input dtype int32 was converted to float64 by MinMaxScaler.
  warnings.warn(msg, _DataConversionWarning)

In [130]:
# Overlay four consecutive "days" of actuals on one axis to eyeball periodicity.
# NOTE(review): these slices are 76 steps wide, but window_size says 78 steps
# per day (and the commented loop below uses 78) — confirm the intended period.
pyplot.figure(figsize=(20,6))
pyplot.plot(actuals[0:76])
pyplot.plot(actuals[76:152])
pyplot.plot(actuals[152:228])
pyplot.plot(actuals[228:304])
# for d in range(5):
#     pyplot.plot(actuals[78*d:78*(d+1)])
pyplot.show()



In [ ]: