Model Category 2: Using calendar features only

The second model category uses only calendar features (dummy variables for holiday, weekday, hour and month) to forecast the electricity load.
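For illustration, dummy variables of this kind can be derived directly from a pandas DatetimeIndex. The following is a minimal sketch, not the implementation used here (the dataset is loaded through the custom `lstm_load_forecasting.data` module below), and the holiday indicator is assumed to already exist in the dataset.


In [ ]:
# Illustrative sketch only: deriving weekday/hour/month dummies from an
# hourly DatetimeIndex. The actual encoding is done by the custom data module;
# the holiday dummy is assumed to be a 0/1 column already in the dataset.
import pandas as pd

idx = pd.date_range('2017-01-01', periods=24 * 7, freq='H', tz='Europe/Zurich')
calendar = pd.DataFrame(index=idx)
calendar = calendar.join(pd.get_dummies(idx.weekday, prefix='weekday').set_index(idx))
calendar = calendar.join(pd.get_dummies(idx.hour, prefix='hour').set_index(idx))
calendar = calendar.join(pd.get_dummies(idx.month, prefix='month').set_index(idx))
print(calendar.shape)  # (168, 32): 7 weekday + 24 hour + 1 month dummy in this window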

Model-category-specific configuration

These parameters are specific to this model category.


In [5]:
# Model category name used throughout the subsequent analysis
model_cat_id = "02"

# Which features from the dataset should be loaded:
# ['all', 'actual', 'entsoe', 'weather_t', 'weather_i', 'holiday', 'weekday', 'hour', 'month']
# ('calendar' loads the holiday, weekday, hour and month dummies together)
features = ['actual', 'calendar']

# LSTM Layer configuration
# ========================
# Whether each layer is stateful (True/False)
layer_conf = [ True, True, True ]
# Number of neurons per layer
cells = [[ 5, 10, 20, 30, 50, 75, 100, 125, 150 ], [0, 10, 20, 50], [0, 10, 15, 20]]
# Dropout regularization per layer
dropout = [0, 0.1, 0.2]
# Number of samples used per forward/backward pass
batch_size = [8]
# Output dimension in timesteps, i.e. how many timesteps the network should forecast at once.
# Multi-step output is currently not implemented; this defaults to 1.
timesteps = [1]
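The actual model construction happens in `lstm.create_model` further below. For orientation, a stateful stacked LSTM of the configured shape could be assembled roughly as follows; this is a sketch under the assumption of a two-layer configuration from the grid (30 and 10 neurons, dropout 0.1), not the module's implementation.


In [ ]:
# Sketch only: what a stateful stacked Keras LSTM of the configured shape
# could look like. The real model is built by lstm.create_model below.
from keras.models import Sequential
from keras.layers import LSTM, Dropout, Dense

batch, steps, n_features = 8, 1, 32              # example dimensions
model = Sequential()
# First LSTM layer: stateful, returns sequences so a second layer can be stacked
model.add(LSTM(30, batch_input_shape=(batch, steps, n_features),
               stateful=True, return_sequences=True))
model.add(Dropout(0.1))
# Final LSTM layer returns only its last output
model.add(LSTM(10, stateful=True, return_sequences=False))
# Single output neuron for the one-step-ahead load forecast
model.add(Dense(1))
model.compile(loss='mse', optimizer='adam', metrics=['mean_absolute_error'])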

Module imports


In [6]:
import os
import sys
import math
import itertools
import datetime as dt
import pytz
import time as t
import numpy as np
import pandas as pd

import matplotlib as mpl

import matplotlib.pyplot as plt
import scipy.stats as stats
from statsmodels.tsa import stattools
from tabulate import tabulate

import keras
from keras import backend as K
from keras.models import Sequential
from keras.layers import Activation, Dense, Dropout, LSTM
from keras.callbacks import TensorBoard
from keras.utils import np_utils
from keras.models import load_model

# TensorFlow backend, used to reset the default graph between model runs
import tensorflow as tf

from sklearn.preprocessing import StandardScaler
from sklearn.metrics import mean_squared_error, mean_absolute_error

from IPython.display import HTML
from IPython.display import display
%matplotlib notebook
mpl.rcParams['figure.figsize'] = (9,5)

# Import custom module functions
module_path = os.path.abspath(os.path.join('../'))
if module_path not in sys.path:
    sys.path.append(module_path)

from lstm_load_forecasting import data, lstm

Overall configuration

These parameters are used later on and should not need to change between the different model categories (models 1-5).


In [7]:
# Directory with dataset
path = os.path.join(os.path.abspath(''), '../data/fulldataset.csv')

# Split date for the train and test data. Because the TBATS and ARIMA benchmarks need two full cycles of all seasonalities, it has to be after January 1.
loc_tz = pytz.timezone('Europe/Zurich')
split_date = loc_tz.localize(dt.datetime(2017,2,1,0,0,0,0))

# Validation split percentage
validation_split = 0.2
# How many epochs in total
epochs = 30
# Verbosity level: 0 prints one summary per model, 1 shows a progress bar, ...
verbose = 0

# Dataframe containing the relevant data from training of all models
results = pd.DataFrame(columns=['model_name', 'config', 'dropout',
                                'train_loss', 'train_rmse', 'train_mae', 'train_mape', 
                                'valid_loss', 'valid_rmse', 'valid_mae', 'valid_mape', 
                                'test_rmse', 'test_mae', 'test_mape',
                                'epochs', 'batch_train', 'input_shape',
                                'total_time', 'time_step', 'splits'
                               ])
# Early stopping parameters
early_stopping = True
min_delta = 0.006
patience = 2
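These three parameters are presumably passed through to a Keras EarlyStopping callback inside `lstm.train_model`. As a sketch, the equivalent direct construction would be (assuming the validation loss is the monitored quantity, which is consistent with the model selection further below):


In [ ]:
# Sketch: the callback the early stopping parameters most likely map to.
# Monitoring 'val_loss' is an assumption, not confirmed by this notebook.
from keras.callbacks import EarlyStopping

early_stopping_cb = EarlyStopping(monitor='val_loss', min_delta=min_delta,
                                  patience=patience, verbose=1)
# ...which would then be passed as callbacks=[early_stopping_cb] to model.fit()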

Preparation and model generation

The necessary preliminary steps (output directories and file names), followed by the generation of all possible model configurations based on the settings at the top of this notebook.


In [8]:
# Generate output folders and files
res_dir = '../results/notebook_' + model_cat_id + '/'
plot_dir = '../plots/notebook_' + model_cat_id + '/'
model_dir = '../models/notebook_' + model_cat_id + '/'
os.makedirs(res_dir, exist_ok=True)
os.makedirs(plot_dir, exist_ok=True)
os.makedirs(model_dir, exist_ok=True)
output_table = res_dir + model_cat_id + '_results_' + t.strftime("%Y%m%d") + '.csv'
test_output_table = res_dir + model_cat_id + '_test_results_' + t.strftime("%Y%m%d") + '.csv'

# Generate model combinations
models = lstm.generate_combinations(
    model_name=model_cat_id + '_', layer_conf=layer_conf, cells=cells, dropout=dropout, 
    batch_size=batch_size, timesteps=[1])


==================================
| Number of model configs generated | 432 |
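For reference, the printed count corresponds to a plain Cartesian product over the configured options. A minimal sketch (this is an assumption about what `generate_combinations` does internally; its implementation is not shown here):


In [ ]:
# Sketch: assuming generate_combinations takes the plain Cartesian product of
# the per-layer cell counts and the dropout values, the count of 432 follows:
import itertools

grid = list(itertools.product(cells[0], cells[1], cells[2], dropout))
print(len(grid))  # 9 * 4 * 4 * 3 = 432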

Loading the data:


In [9]:
# Load data and prepare for standardization
df = data.load_dataset(path=path, modules=features)
df_scaled = df.copy()
df_scaled = df_scaled.dropna()

# Get all float type columns and standardize them
floats = df_scaled.select_dtypes(include=['float64']).columns
scaler = StandardScaler()
scaled_columns = scaler.fit_transform(df_scaled[floats])
df_scaled[floats] = scaled_columns

# Split in train and test dataset
df_train = df_scaled.loc[(df_scaled.index < split_date )].copy()
df_test = df_scaled.loc[df_scaled.index >= split_date].copy()

# Split in features and label data
y_train = df_train['actual'].copy()
X_train = df_train.drop('actual', axis=1).copy()
y_test = df_test['actual'].copy()
X_test = df_test.drop('actual', axis=1).copy()
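Note that the target column 'actual' is standardized together with the other float columns, so all losses and errors below are in standardized units. A sketch of how a forecast could be mapped back to the original load scale (assuming 'actual' is among the scaled columns; the prediction values are hypothetical):


In [ ]:
# Sketch: invert the standardization for the target column.
# Assumes 'actual' was among the float columns scaled above.
pos = list(floats).index('actual')                # position of the target in the scaler
mean_, std_ = scaler.mean_[pos], scaler.scale_[pos]

y_pred_scaled = np.array([0.5, -0.2, 1.3])        # hypothetical standardized forecasts
y_pred = y_pred_scaled * std_ + mean_             # back to the original load units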

Running through all generated models

Note: Depending on the settings above, this can take a very long time!


In [ ]:
start_time = t.time()
for idx, m in enumerate(models):
    stopper = t.time()
    print('========================= Model {}/{} ========================='.format(idx+1, len(models)))
    print(tabulate([['Starting with model', m['name']], ['Starting time', dt.datetime.fromtimestamp(stopper)]],
                   tablefmt="jira", numalign="right", floatfmt=".3f"))
    try:
        # Creating the Keras Model
        model = lstm.create_model(layers=m['layers'], sample_size=X_train.shape[0], batch_size=m['batch_size'], 
                          timesteps=m['timesteps'], features=X_train.shape[1])
        # Training...
        history = lstm.train_model(model=model, mode='fit', y=y_train, X=X_train, 
                                   batch_size=m['batch_size'], timesteps=m['timesteps'], epochs=epochs, 
                                   rearrange=False, validation_split=validation_split, verbose=verbose, 
                                   early_stopping=early_stopping, min_delta=min_delta, patience=patience)

        # Write results
        min_loss = np.min(history.history['val_loss'])
        min_idx = np.argmin(history.history['val_loss'])
        min_epoch = min_idx + 1
        
        if verbose > 0:
            print('______________________________________________________________________')
            print(tabulate([['Minimum validation loss at epoch', min_epoch, 'Time: {}'.format(t.time()-stopper)],
                        ['Training loss & MAE', history.history['loss'][min_idx], history.history['mean_absolute_error'][min_idx]  ], 
                        ['Validation loss & mae', history.history['val_loss'][min_idx], history.history['val_mean_absolute_error'][min_idx] ],
                       ], tablefmt="jira", numalign="right", floatfmt=".3f"))
            print('______________________________________________________________________')
        
        
        result = [{'model_name': m['name'], 'config': m, 'train_loss': history.history['loss'][min_idx], 'train_rmse': 0,
                   'train_mae': history.history['mean_absolute_error'][min_idx], 'train_mape': 0,
                   'valid_loss': history.history['val_loss'][min_idx], 'valid_rmse': 0, 
                   'valid_mae': history.history['val_mean_absolute_error'][min_idx],'valid_mape': 0, 
                   'test_rmse': 0, 'test_mae': 0, 'test_mape': 0, 'epochs': '{}/{}'.format(min_epoch, epochs), 'batch_train':m['batch_size'],
                   'input_shape':(X_train.shape[0], m['timesteps'], X_train.shape[1]), 'total_time':t.time()-stopper, 
                   'time_step':0, 'splits':str(split_date), 'dropout': m['layers'][0]['dropout']
                  }]
        results = results.append(result, ignore_index=True)
        
        # Saving the model and weights
        model.save(model_dir + m['name'] + '.h5')
        
        # Write results to csv
        results.to_csv(output_table, sep=';')
        
        # Clear the Keras session and TF graph to free memory between models
        K.clear_session()
        tf.reset_default_graph()
        
    # Catch any model error so one failed config doesn't abort the whole run
    except Exception as e:
        print('=============== ERROR {}/{} ============='.format(idx+1, len(models)))
        print(tabulate([['Model:', m['name']], ['Config:', m]], tablefmt="jira", numalign="right", floatfmt=".3f"))
        print('Error: {}'.format(e))
        result = [{'model_name': m['name'], 'config': m, 'train_loss': str(e)}]
        results = results.append(result, ignore_index=True)
        results.to_csv(output_table,sep=';')
        continue


========================= Model 1/288 =========================
| Starting with model | 02_1_l-30                  |
| Starting time       | 2017-06-17 09:27:58.112810 |
========================= Model 2/288 =========================
| Starting with model | 02_2_l-30_d-0.1            |
| Starting time       | 2017-06-17 09:28:47.857243 |
========================= Model 3/288 =========================
| Starting with model | 02_3_l-30_d-0.2            |
| Starting time       | 2017-06-17 09:31:16.593137 |
========================= Model 4/288 =========================
| Starting with model | 02_4_l-30_l-10             |
| Starting time       | 2017-06-17 09:31:59.621169 |
========================= Model 5/288 =========================
| Starting with model | 02_5_l-30_l-10_d-0.1       |
| Starting time       | 2017-06-17 09:34:11.360011 |
========================= Model 6/288 =========================
| Starting with model | 02_6_l-30_l-10_d-0.2       |
| Starting time       | 2017-06-17 09:37:01.269220 |
========================= Model 7/288 =========================
| Starting with model | 02_7_l-30_l-15             |
| Starting time       | 2017-06-17 09:41:10.714934 |
========================= Model 8/288 =========================
| Starting with model | 02_8_l-30_l-15_d-0.1       |
| Starting time       | 2017-06-17 09:50:01.487827 |
========================= Model 9/288 =========================
| Starting with model | 02_9_l-30_l-15_d-0.2       |
| Starting time       | 2017-06-17 09:54:26.832310 |
========================= Model 10/288 =========================
| Starting with model | 02_10_l-30_l-20            |
| Starting time       | 2017-06-17 09:58:50.437371 |
========================= Model 11/288 =========================
| Starting with model | 02_11_l-30_l-20_d-0.1      |
| Starting time       | 2017-06-17 10:01:12.081681 |
========================= Model 12/288 =========================
| Starting with model | 02_12_l-30_l-20_d-0.2      |
| Starting time       | 2017-06-17 10:04:36.339659 |
========================= Model 13/288 =========================
| Starting with model | 02_13_l-30_l-10            |
| Starting time       | 2017-06-17 10:11:08.030361 |
========================= Model 14/288 =========================
| Starting with model | 02_14_l-30_l-10_d-0.1      |
| Starting time       | 2017-06-17 10:13:04.899830 |
========================= Model 15/288 =========================
| Starting with model | 02_15_l-30_l-10_d-0.2      |
| Starting time       | 2017-06-17 10:17:51.289293 |
========================= Model 16/288 =========================
| Starting with model | 02_16_l-30_l-10_l-10       |
| Starting time       | 2017-06-17 10:21:44.408105 |
========================= Model 17/288 =========================
| Starting with model | 02_17_l-30_l-10_l-10_d-0.1 |
| Starting time       | 2017-06-17 10:24:21.758120 |
========================= Model 18/288 =========================
| Starting with model | 02_18_l-30_l-10_l-10_d-0.2 |
| Starting time       | 2017-06-17 10:30:37.746963 |
========================= Model 19/288 =========================
| Starting with model | 02_19_l-30_l-10_l-15       |
| Starting time       | 2017-06-17 10:38:01.553105 |
========================= Model 20/288 =========================
| Starting with model | 02_20_l-30_l-10_l-15_d-0.1 |
| Starting time       | 2017-06-17 10:41:00.033977 |
========================= Model 21/288 =========================
| Starting with model | 02_21_l-30_l-10_l-15_d-0.2 |
| Starting time       | 2017-06-17 10:51:15.162333 |
========================= Model 22/288 =========================
| Starting with model | 02_22_l-30_l-10_l-20       |
| Starting time       | 2017-06-17 11:02:47.131870 |
========================= Model 23/288 =========================
| Starting with model | 02_23_l-30_l-10_l-20_d-0.1 |
| Starting time       | 2017-06-17 11:09:36.899496 |
========================= Model 24/288 =========================
| Starting with model | 02_24_l-30_l-10_l-20_d-0.2 |
| Starting time       | 2017-06-17 11:17:40.313285 |
========================= Model 25/288 =========================
| Starting with model | 02_25_l-30_l-20            |
| Starting time       | 2017-06-17 11:24:47.651955 |
========================= Model 26/288 =========================
| Starting with model | 02_26_l-30_l-20_d-0.1      |
| Starting time       | 2017-06-17 11:28:18.776774 |
========================= Model 27/288 =========================
| Starting with model | 02_27_l-30_l-20_d-0.2      |
| Starting time       | 2017-06-17 11:31:44.674811 |
========================= Model 28/288 =========================
| Starting with model | 02_28_l-30_l-20_l-10       |
| Starting time       | 2017-06-17 11:36:23.430451 |
========================= Model 29/288 =========================
| Starting with model | 02_29_l-30_l-20_l-10_d-0.1 |
| Starting time       | 2017-06-17 11:39:30.163149 |
========================= Model 30/288 =========================
| Starting with model | 02_30_l-30_l-20_l-10_d-0.2 |
| Starting time       | 2017-06-17 11:44:37.302970 |
========================= Model 31/288 =========================
| Starting with model | 02_31_l-30_l-20_l-15       |
| Starting time       | 2017-06-17 11:51:27.852108 |
========================= Model 32/288 =========================
| Starting with model | 02_32_l-30_l-20_l-15_d-0.1 |
| Starting time       | 2017-06-17 11:55:44.193847 |
========================= Model 33/288 =========================
| Starting with model | 02_33_l-30_l-20_l-15_d-0.2 |
| Starting time       | 2017-06-17 12:00:31.770607 |
========================= Model 34/288 =========================
| Starting with model | 02_34_l-30_l-20_l-20       |
| Starting time       | 2017-06-17 12:10:51.045395 |
========================= Model 35/288 =========================
| Starting with model | 02_35_l-30_l-20_l-20_d-0.1 |
| Starting time       | 2017-06-17 12:14:33.450349 |
========================= Model 36/288 =========================
| Starting with model | 02_36_l-30_l-20_l-20_d-0.2 |
| Starting time       | 2017-06-17 12:20:39.264670 |
========================= Model 37/288 =========================
| Starting with model | 02_37_l-30_l-50            |
| Starting time       | 2017-06-17 12:23:48.567334 |
========================= Model 38/288 =========================
| Starting with model | 02_38_l-30_l-50_d-0.1      |
| Starting time       | 2017-06-17 12:29:38.466649 |
========================= Model 39/288 =========================
| Starting with model | 02_39_l-30_l-50_d-0.2      |
| Starting time       | 2017-06-17 12:32:59.071624 |
========================= Model 40/288 =========================
| Starting with model | 02_40_l-30_l-50_l-10       |
| Starting time       | 2017-06-17 12:39:48.406321 |
========================= Model 41/288 =========================
| Starting with model | 02_41_l-30_l-50_l-10_d-0.1 |
| Starting time       | 2017-06-17 12:43:35.530372 |
========================= Model 42/288 =========================
| Starting with model | 02_42_l-30_l-50_l-10_d-0.2 |
| Starting time       | 2017-06-17 12:50:55.766447 |
========================= Model 43/288 =========================
| Starting with model | 02_43_l-30_l-50_l-15       |
| Starting time       | 2017-06-17 13:02:42.241269 |
========================= Model 44/288 =========================
| Starting with model | 02_44_l-30_l-50_l-15_d-0.1 |
| Starting time       | 2017-06-17 13:11:57.656292 |
========================= Model 45/288 =========================
| Starting with model | 02_45_l-30_l-50_l-15_d-0.2 |
| Starting time       | 2017-06-17 13:16:02.980068 |
========================= Model 46/288 =========================
| Starting with model | 02_46_l-30_l-50_l-20       |
| Starting time       | 2017-06-17 13:19:18.245930 |
========================= Model 47/288 =========================
| Starting with model | 02_47_l-30_l-50_l-20_d-0.1 |
| Starting time       | 2017-06-17 13:21:42.480973 |
========================= Model 48/288 =========================
| Starting with model | 02_48_l-30_l-50_l-20_d-0.2 |
| Starting time       | 2017-06-17 13:27:43.962824 |
========================= Model 49/288 =========================
| Starting with model | 02_49_l-50                 |
| Starting time       | 2017-06-17 13:32:09.201894 |
========================= Model 50/288 =========================
| Starting with model | 02_50_l-50_d-0.1           |
| Starting time       | 2017-06-17 13:33:19.896017 |
========================= Model 51/288 =========================
| Starting with model | 02_51_l-50_d-0.2           |
| Starting time       | 2017-06-17 13:35:43.594090 |
========================= Model 52/288 =========================
| Starting with model | 02_52_l-50_l-10            |
| Starting time       | 2017-06-17 13:40:06.180392 |
========================= Model 53/288 =========================
| Starting with model | 02_53_l-50_l-10_d-0.1      |
| Starting time       | 2017-06-17 13:45:10.797848 |
========================= Model 54/288 =========================
| Starting with model | 02_54_l-50_l-10_d-0.2      |
| Starting time       | 2017-06-17 13:50:18.232172 |
========================= Model 55/288 =========================
| Starting with model | 02_55_l-50_l-15            |
| Starting time       | 2017-06-17 13:57:17.940663 |
========================= Model 56/288 =========================
| Starting with model | 02_56_l-50_l-15_d-0.1      |
| Starting time       | 2017-06-17 14:04:16.330594 |
========================= Model 57/288 =========================
| Starting with model | 02_57_l-50_l-15_d-0.2      |
| Starting time       | 2017-06-17 14:09:39.166262 |
========================= Model 58/288 =========================
| Starting with model | 02_58_l-50_l-20            |
| Starting time       | 2017-06-17 14:11:42.056698 |
========================= Model 59/288 =========================
| Starting with model | 02_59_l-50_l-20_d-0.1      |
| Starting time       | 2017-06-17 14:14:34.483814 |
========================= Model 60/288 =========================
| Starting with model | 02_60_l-50_l-20_d-0.2      |
| Starting time       | 2017-06-17 14:20:34.079223 |
========================= Model 61/288 =========================
| Starting with model | 02_61_l-50_l-10            |
| Starting time       | 2017-06-17 14:30:49.963683 |
========================= Model 62/288 =========================
| Starting with model | 02_62_l-50_l-10_d-0.1      |
| Starting time       | 2017-06-17 14:34:28.315732 |
========================= Model 63/288 =========================
| Starting with model | 02_63_l-50_l-10_d-0.2      |
| Starting time       | 2017-06-17 14:41:54.141071 |
========================= Model 64/288 =========================
| Starting with model | 02_64_l-50_l-10_l-10       |
| Starting time       | 2017-06-17 14:48:10.853911 |
========================= Model 65/288 =========================
| Starting with model | 02_65_l-50_l-10_l-10_d-0.1 |
| Starting time       | 2017-06-17 14:51:26.079016 |
========================= Model 66/288 =========================
| Starting with model | 02_66_l-50_l-10_l-10_d-0.2 |
| Starting time       | 2017-06-17 14:54:41.023126 |
========================= Model 67/288 =========================
| Starting with model | 02_67_l-50_l-10_l-15       |
| Starting time       | 2017-06-17 15:00:25.447869 |
========================= Model 68/288 =========================
| Starting with model | 02_68_l-50_l-10_l-15_d-0.1 |
| Starting time       | 2017-06-17 15:06:32.205912 |
========================= Model 69/288 =========================
| Starting with model | 02_69_l-50_l-10_l-15_d-0.2 |
| Starting time       | 2017-06-17 15:16:32.523039 |
========================= Model 70/288 =========================
| Starting with model | 02_70_l-50_l-10_l-20       |
| Starting time       | 2017-06-17 15:20:43.230922 |
========================= Model 71/288 =========================
| Starting with model | 02_71_l-50_l-10_l-20_d-0.1 |
| Starting time       | 2017-06-17 15:24:08.136113 |
========================= Model 72/288 =========================
| Starting with model | 02_72_l-50_l-10_l-20_d-0.2 |
| Starting time       | 2017-06-17 15:26:30.770105 |
========================= Model 73/288 =========================
| Starting with model | 02_73_l-50_l-20            |
| Starting time       | 2017-06-17 15:41:29.623179 |
========================= Model 74/288 =========================
| Starting with model | 02_74_l-50_l-20_d-0.1      |
| Starting time       | 2017-06-17 15:49:52.149372 |
========================= Model 75/288 =========================
| Starting with model | 02_75_l-50_l-20_d-0.2      |
| Starting time       | 2017-06-17 15:51:39.941686 |
========================= Model 76/288 =========================
| Starting with model | 02_76_l-50_l-20_l-10       |
| Starting time       | 2017-06-17 15:57:22.302235 |
========================= Model 77/288 =========================
| Starting with model | 02_77_l-50_l-20_l-10_d-0.1 |
| Starting time       | 2017-06-17 15:59:58.618121 |
========================= Model 78/288 =========================
| Starting with model | 02_78_l-50_l-20_l-10_d-0.2 |
| Starting time       | 2017-06-17 16:03:27.050579 |
========================= Model 79/288 =========================
| Starting with model | 02_79_l-50_l-20_l-15       |
| Starting time       | 2017-06-17 16:06:52.265142 |
========================= Model 80/288 =========================
| Starting with model | 02_80_l-50_l-20_l-15_d-0.1 |
| Starting time       | 2017-06-17 16:10:11.611277 |
========================= Model 81/288 =========================
| Starting with model | 02_81_l-50_l-20_l-15_d-0.2 |
| Starting time       | 2017-06-17 16:13:26.929225 |
========================= Model 82/288 =========================
| Starting with model | 02_82_l-50_l-20_l-20       |
| Starting time       | 2017-06-17 16:20:51.512234 |
========================= Model 83/288 =========================
| Starting with model | 02_83_l-50_l-20_l-20_d-0.1 |
| Starting time       | 2017-06-17 16:24:13.530264 |
========================= Model 84/288 =========================
| Starting with model | 02_84_l-50_l-20_l-20_d-0.2 |
| Starting time       | 2017-06-17 16:27:42.596336 |
========================= Model 85/288 =========================
| Starting with model | 02_85_l-50_l-50            |
| Starting time       | 2017-06-17 16:31:04.644139 |
========================= Model 86/288 =========================
| Starting with model | 02_86_l-50_l-50_d-0.1      |
| Starting time       | 2017-06-17 16:38:30.643691 |
========================= Model 87/288 =========================
| Starting with model | 02_87_l-50_l-50_d-0.2      |
| Starting time       | 2017-06-17 16:47:11.049494 |
========================= Model 88/288 =========================
| Starting with model | 02_88_l-50_l-50_l-10       |
| Starting time       | 2017-06-17 16:52:22.932045 |
========================= Model 89/288 =========================
| Starting with model | 02_89_l-50_l-50_l-10_d-0.1 |
| Starting time       | 2017-06-17 16:55:13.796142 |
========================= Model 90/288 =========================
| Starting with model | 02_90_l-50_l-50_l-10_d-0.2 |
| Starting time       | 2017-06-17 17:00:09.674513 |
========================= Model 91/288 =========================
| Starting with model | 02_91_l-50_l-50_l-15       |
| Starting time       | 2017-06-17 17:07:24.509569 |
========================= Model 92/288 =========================
| Starting with model | 02_92_l-50_l-50_l-15_d-0.1 |
| Starting time       | 2017-06-17 17:09:52.976379 |
========================= Model 93/288 =========================
| Starting with model | 02_93_l-50_l-50_l-15_d-0.2 |
| Starting time       | 2017-06-17 17:14:47.848049 |
========================= Model 94/288 =========================
| Starting with model | 02_94_l-50_l-50_l-20       |
| Starting time       | 2017-06-17 17:18:33.261907 |
========================= Model 95/288 =========================
| Starting with model | 02_95_l-50_l-50_l-20_d-0.1 |
| Starting time       | 2017-06-17 17:22:35.589603 |
========================= Model 96/288 =========================
| Starting with model | 02_96_l-50_l-50_l-20_d-0.2 |
| Starting time       | 2017-06-17 17:26:20.828251 |
========================= Model 97/288 =========================
| Starting with model | 02_97_l-75                 |
| Starting time       | 2017-06-17 17:37:19.345712 |

Model selection based on the validation MAE

Select the top 5 models based on the mean absolute error on the validation data: http://scikit-learn.org/stable/modules/model_evaluation.html#mean-absolute-error
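As a quick reminder of the metric (hypothetical values, standardized units):


In [ ]:
# MAE = mean(|y_true - y_pred|), illustrated with hypothetical values
y_true = np.array([0.2, -0.1, 1.0])
y_pred = np.array([0.3,  0.0, 0.8])
print(np.mean(np.abs(y_true - y_pred)))  # 0.1333...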


In [13]:
# Number of the selected top models 
selection = 5
# Not necessary if run in the same session. If run on the same day, output_table can be used directly.
results_fn = res_dir + model_cat_id + '_results_' + '20170616' + '.csv'

results_csv = pd.read_csv(results_fn, delimiter=';', encoding = 'latin1')
top_models = results_csv.nsmallest(selection, 'valid_mae')

Evaluate the top 5 models on the test dataset


In [14]:
# Init test results table
test_results = pd.DataFrame(columns=['Model name', 'Mean absolute error', 'Mean squared error'])

# Init empty predictions
predictions = {}

# Loop through models
for index, row in top_models.iterrows():
    filename = model_dir + row['model_name'] + '.h5'
    model = load_model(filename)
    batch_size = int(row['batch_train'])
    
    # Calculate scores
    loss, mae = lstm.evaluate_model(model=model, X=X_test, y=y_test, batch_size=batch_size, timesteps=1, verbose=verbose)
    
    # Store results
    result = [{'Model name': row['model_name'], 
               'Mean squared error': loss, 'Mean absolute error': mae
              }]
    test_results = test_results.append(result, ignore_index=True)
    
    # Generate predictions
    model.reset_states()
    model_predictions = lstm.get_predictions(model=model, X=X_test, batch_size=batch_size, timesteps=timesteps[0], verbose=verbose)
    
    # Save predictions
    predictions[row['model_name']] = model_predictions
    
    # Clear the Keras session and TF graph between models
    K.clear_session()
    tf.reset_default_graph()
    

test_results = test_results.sort_values('Mean absolute error', ascending=True)
test_results = test_results.set_index(['Model name'])

if not os.path.isfile(test_output_table):
    test_results.to_csv(test_output_table, sep=';')
else: # else it exists so append without writing the header
    test_results.to_csv(test_output_table,mode = 'a',header=False, sep=';')
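The stored predictions can then be inspected visually against the actual test series. A minimal plotting sketch (assuming each entry in `predictions` is a flat array aligned with the start of `y_test`; `plot_dir` was created above):


In [ ]:
# Sketch: plot the best model's (standardized) forecast against the actual load.
best_name = test_results.index[0]
y_hat = np.asarray(predictions[best_name]).ravel()

plt.figure()
plt.plot(y_test.values[:len(y_hat)], label='actual')
plt.plot(y_hat, label=best_name)
plt.legend()
plt.savefig(plot_dir + best_name + '_test_forecast.png')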

In [15]:
print('Test dataset performance of the best {} (out of {} tested models):'.format(min(selection, len(models)), len(models)))
print(tabulate(test_results, headers='keys', tablefmt="grid", numalign="right", floatfmt=".3f"))


Test dataset performance of the best 5 (out of 432 tested models):
+--------------------------+-----------------------+----------------------+
| Model name               |   Mean absolute error |   Mean squared error |
+==========================+=======================+======================+
| 1_02_2_l-5_d-0.1         |                 0.305 |                0.151 |
+--------------------------+-----------------------+----------------------+
| 1_02_50_l-10_d-0.1       |                 0.320 |                0.165 |
+--------------------------+-----------------------+----------------------+
| 1_02_135_l-20_l-50_d-0.2 |                 0.341 |                0.181 |
+--------------------------+-----------------------+----------------------+
| 3_02_51_l-100_d-0.2      |                 0.351 |                0.189 |
+--------------------------+-----------------------+----------------------+
| 3_02_49_l-100            |                 0.367 |                0.209 |
+--------------------------+-----------------------+----------------------+
