Predicting Activity with a Look-back

Multilayer Perceptron

... i.e., first attempts at modelling time dependence, with PCA and with the full dataset


In [2]:
import numpy
import pandas
from keras.models import Sequential
from keras.layers import Dense
from keras.wrappers.scikit_learn import KerasClassifier
from sklearn.cross_validation import cross_val_score
from sklearn.preprocessing import LabelEncoder
from sklearn.cross_validation import StratifiedKFold
from sklearn.preprocessing import StandardScaler
from sklearn.pipeline import Pipeline

# fix random seed for reproducibility
seed = 66
numpy.random.seed(seed)

data = pandas.read_csv("../data/processed/train.csv")
notnull_data = data[data.notnull().all(axis=1)]
train = notnull_data.values
data2 = pandas.read_csv("../data/processed/test.csv")
notnull_data2 = data2[data2.notnull().all(axis=1)]
test = notnull_data2.values


Using Theano backend.

In [3]:
X_train = train[:,3:7558].astype(float)
#X_train = train[:,3:13].astype(float)
Y_train = train[:,7558]
X_test = test[:,3:7558].astype(float)
#X_test = test[:,3:13].astype(float)
Y_test = test[:,7558]

# One-hot encoding of the response variable (using dummy variables)
from keras.utils.np_utils import to_categorical

# encode class values as integers
encoder = LabelEncoder()
encoder.fit(Y_train)
encoded_Y_train = encoder.transform(Y_train)
# convert integers to dummy variables (i.e., one-hot encoded)
dummy_y_train = to_categorical(encoded_Y_train)
# NB: re-fitting the encoder on Y_test assumes train and test share the same label set
encoder.fit(Y_test)
encoded_Y_test = encoder.transform(Y_test)
dummy_y_test = to_categorical(encoded_Y_test)

# Sanity check on matrix dimensions, after dropping null/nans
#print X_train.shape #(4472, 7555)
#print Y_test.shape #(1044, )
#print dummy_y_test.shape # (1044, 5)
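
One caveat with the encoding above: the encoder is re-fitted on Y_test, which only works because train and test happen to contain the same set of class labels. A safer variant (a sketch, not what was run here) fits the encoder once, on the training labels, and reuses it:

# Fit the label encoder on the training labels only, then reuse it for test,
# so the integer coding is guaranteed to match between the two sets
encoder = LabelEncoder()
encoder.fit(Y_train)
dummy_y_train = to_categorical(encoder.transform(Y_train))
dummy_y_test = to_categorical(encoder.transform(Y_test))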

In [4]:
# We standardize on the basis of the training data
scaler = StandardScaler().fit(X_train)
X_train_st = scaler.transform(X_train)
X_test_st = scaler.transform(X_test)

# We do PCA, too
from sklearn import decomposition
n_comp = 100
pca = decomposition.PCA(n_components=n_comp)
X_train_pca = pca.fit_transform(X_train_st)
X_test_pca = pca.transform(X_test_st)

print 'Variance explained:'
print pca.explained_variance_ratio_
print 'Total variance explained by '+str(n_comp)+' components:'
print sum(pca.explained_variance_ratio_)


Variance explained:
[ 0.19505564  0.08295663  0.04067402  0.02174272  0.01775119  0.01686748
  0.01440615  0.01150575  0.01055     0.00958828  0.00877439  0.00790115
  0.007298    0.00704889  0.0066142   0.00595329  0.00538577  0.00530285
  0.00491633  0.0045751   0.00440203  0.00436509  0.00395662  0.00343414
  0.00340722  0.00328142  0.00325585  0.00305899  0.00297009  0.00289683
  0.00281987  0.00273652  0.00266189  0.00265268  0.00260069  0.0024912
  0.00238337  0.00232648  0.00226495  0.00224082  0.00221907  0.00217391
  0.00215321  0.00208428  0.00204535  0.00198475  0.00192726  0.00190714
  0.00187787  0.00183914  0.00176811  0.00173735  0.00169542  0.00166308
  0.00165403  0.00162881  0.00161277  0.0015787   0.00157364  0.00155227
  0.0015123   0.00149719  0.00148685  0.00147954  0.00145035  0.00142099
  0.00141436  0.00141164  0.00138952  0.00136064  0.00134219  0.0013185
  0.00130973  0.00129871  0.00129763  0.0012655   0.00125215  0.00124378
  0.00122832  0.0012048   0.00119168  0.00117427  0.0011715   0.00116194
  0.0011515   0.00113744  0.00112647  0.00111649  0.00111051  0.00109261
  0.00108877  0.00107673  0.00107208  0.00105653  0.00104749  0.00103841
  0.00103485  0.00101975  0.00100922  0.00100201]
Total variance explained by 100 components:
0.630815671837
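
Rather than eyeballing the ratios above, the cumulative sum of pca.explained_variance_ratio_ shows directly how many components a given variance target needs. A quick check on the already-fitted pca object (the 50% target is just illustrative; note that 100 components only reach ~63% in total):

# How many components are needed to reach e.g. 50% explained variance?
cum_var = numpy.cumsum(pca.explained_variance_ratio_)
print 'Components needed for 50% variance:', numpy.searchsorted(cum_var, 0.50) + 1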

In [39]:
# We create the look-back dataset
#print train[:,1:3] # session is [1], timestamp is [2]

# convert an array of values into a dataset matrix, depending on the nr. of steps to look back
def create_dataset(sessiontimes, X, Y, look_back=1):
    dataX, dataY = [], []
    if((sessiontimes.shape[0] != X.shape[0]) or 
       (sessiontimes.shape[0] != Y.shape[0])):
        # Different number of rows, something's wrong!
        print 'Dimensions of the X, Y and timestamps do not match!!'
        return None
    sessions = numpy.unique(sessiontimes[:,0])
    for session in sessions:
        print session
        stimes = sessiontimes[numpy.where(sessiontimes[:,0] == session)]
        sessionX = X[numpy.where(sessiontimes[:,0] == session)]
        sessionY = Y[numpy.where(sessiontimes[:,0] == session)]
        sessiondataX, sessiondataY = [], []
        # For Y, we just eliminate the first look_back rows
        sessiondataY = sessionY[look_back:sessionY.shape[0],:]
        # For X, we successively roll and append the data, then eliminate the first look_back rows
        for i in range(look_back+1):
            rolled = numpy.roll(sessionX,i,axis=0)
            if(i == 0):
                sessiondataX = numpy.array(rolled)
            else:
                sessiondataX = numpy.hstack((sessiondataX, numpy.array(rolled)))
        sessiondataX = sessiondataX[look_back:(sessiondataX.shape[0]),:]
        # We join all the sessions data
        if len(dataX)==0:
            dataX = sessiondataX
            dataY = sessiondataY
        else:
            dataX = numpy.vstack((dataX, sessiondataX))
            dataY = numpy.vstack((dataY, sessiondataY))
    return dataX, dataY

# reshape into X = (t, t-1, ..., t-look_back) and Y = t
lookbk = 9
X_train_lb, Y_train_lb = create_dataset(train[:,1:3], X_train_pca, dummy_y_train, lookbk)
X_test_lb, Y_test_lb = create_dataset(test[:,1:3], X_test_pca, dummy_y_test, lookbk)
print X_train_lb.shape, Y_test_lb.shape


case1-day1-session2-teacher1
case1-day1-session3-teacher1
case1-day1-session4-teacher1
case2-day1-session1-teacher2
case2-day1-session2-teacher2
case2-day2-session1-teacher2
case2-day2-session2-teacher2
case2-day3-session2-teacher2
case2-day4-session1-teacher2
case2-day4-session2-teacher2
case1-day1-session1-teacher1
case2-day3-session1-teacher2
(4382, 1000) (1026, 5)
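
To make the rolling logic concrete, here is a tiny toy example (not part of the original run): with look_back=2, row t of the stacked matrix becomes [x_t, x_(t-1), x_(t-2)], and the first look_back rows, whose history would wrap around the session boundary, are dropped:

# Toy illustration of the look-back stacking used in create_dataset
toyX = numpy.array([[1], [2], [3], [4], [5]], dtype=float)
stacked = toyX
for i in range(1, 3):  # look_back = 2
    stacked = numpy.hstack((stacked, numpy.roll(toyX, i, axis=0)))
print stacked[2:]
# [[ 3.  2.  1.]
#  [ 4.  3.  2.]
#  [ 5.  4.  3.]]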

In [40]:
from keras.layers import Dropout
from keras.constraints import maxnorm
from keras.optimizers import SGD

# baseline model
def create_baseline():
    # create model
    model = Sequential()
    model.add(Dense(200, input_dim=7555, init='uniform', activation='tanh', W_constraint=maxnorm(4)))
    model.add(Dense(20, init='uniform', activation='tanh', W_constraint=maxnorm(4)))
    model.add(Dense(5, init='uniform', activation='sigmoid'))
    # Compile model
    model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
    return model

# Apply dropout regularization, since the baseline overfits
def create_dropout():
    # create model
    model = Sequential()
    model.add(Dropout(0.2, input_shape=(7555,)))
    model.add(Dense(200, init='uniform', activation='tanh'))
    model.add(Dropout(0.2))
    model.add(Dense(20, init='uniform', activation='tanh'))
    model.add(Dropout(0.2))
    model.add(Dense(5, init='uniform', activation='sigmoid'))
    # Compile model, with larger learning rate and momentum, as recommended by the original dropout paper
    sgd = SGD(lr=0.01, momentum=0.9, decay=0.0, nesterov=False)
    model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])
    return model

# Apply dropout regularization with learning-rate decay, since the baseline overfits
def create_dropout_decay():
    # create model
    model = Sequential()
    model.add(Dropout(0.2, input_shape=(7555,)))
    model.add(Dense(200, init='uniform', activation='tanh'))
    model.add(Dropout(0.2))
    model.add(Dense(20, init='uniform', activation='tanh'))
    model.add(Dropout(0.2))
    model.add(Dense(5, init='uniform', activation='sigmoid'))
    # Compile model, with larger learning rate and momentum, as recommended by the original dropout paper
    sgd = SGD(lr=0.1, momentum=0.9, decay=0.005, nesterov=False)
    model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])
    return model

# A deeper variant with dropout and learning-rate decay, sized for the PCA look-back features
def create_deeper_dropout_decay_PCA(n_components, lookback, learningrate, n_epochs):
    # create model
    model = Sequential()
    model.add(Dropout(0.2, input_shape=(n_components*(lookback+1),)))
    model.add(Dense(300, init='uniform', activation='tanh'))
    model.add(Dropout(0.2))
    model.add(Dense(300, init='uniform', activation='tanh'))
    model.add(Dropout(0.2))
    model.add(Dense(80, init='uniform', activation='tanh'))
    model.add(Dropout(0.2))
    model.add(Dense(80, init='uniform', activation='tanh'))
    model.add(Dropout(0.2))
    model.add(Dense(20, init='uniform', activation='tanh'))
    model.add(Dropout(0.2))
    model.add(Dense(20, init='uniform', activation='tanh'))
    model.add(Dropout(0.2))
    model.add(Dense(5, init='uniform', activation='sigmoid'))
    # Compile model, with larger learning rate and momentum, as recommended by the original dropout paper
    sgd = SGD(lr=learningrate, momentum=0.8, decay=learningrate/n_epochs, nesterov=False)
    model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])
    return model

# Build and inspect the deeper dropout+decay model for the PCA look-back dataset
numpy.random.seed(seed)
#estimators = []
#estimators.append(('standardize', StandardScaler()))
#estimators.append(('mlp', KerasClassifier(build_fn=create_baseline, nb_epoch=10, batch_size=10, verbose=1)))
# We define a pipeline of estimators, in which first the scaler is fitted to the data, then the MLP is applied
#pipeline = Pipeline(estimators)
#kfold = StratifiedKFold(y=Y_train, n_folds=3, shuffle=True, random_state=seed)
#model = create_baseline()
learningrate = 0.1
n_epochs = 1000
model = create_deeper_dropout_decay_PCA(n_components=n_comp, lookback=lookbk, learningrate=learningrate, n_epochs=n_epochs)
print model.summary()


____________________________________________________________________________________________________
Layer (type)                     Output Shape          Param #     Connected to                     
====================================================================================================
dropout_22 (Dropout)             (None, 1000)          0           dropout_input_4[0][0]            
____________________________________________________________________________________________________
dense_22 (Dense)                 (None, 300)           300300      dropout_22[0][0]                 
____________________________________________________________________________________________________
dropout_23 (Dropout)             (None, 300)           0           dense_22[0][0]                   
____________________________________________________________________________________________________
dense_23 (Dense)                 (None, 300)           90300       dropout_23[0][0]                 
____________________________________________________________________________________________________
dropout_24 (Dropout)             (None, 300)           0           dense_23[0][0]                   
____________________________________________________________________________________________________
dense_24 (Dense)                 (None, 80)            24080       dropout_24[0][0]                 
____________________________________________________________________________________________________
dropout_25 (Dropout)             (None, 80)            0           dense_24[0][0]                   
____________________________________________________________________________________________________
dense_25 (Dense)                 (None, 80)            6480        dropout_25[0][0]                 
____________________________________________________________________________________________________
dropout_26 (Dropout)             (None, 80)            0           dense_25[0][0]                   
____________________________________________________________________________________________________
dense_26 (Dense)                 (None, 20)            1620        dropout_26[0][0]                 
____________________________________________________________________________________________________
dropout_27 (Dropout)             (None, 20)            0           dense_26[0][0]                   
____________________________________________________________________________________________________
dense_27 (Dense)                 (None, 20)            420         dropout_27[0][0]                 
____________________________________________________________________________________________________
dropout_28 (Dropout)             (None, 20)            0           dense_27[0][0]                   
____________________________________________________________________________________________________
dense_28 (Dense)                 (None, 5)             105         dropout_28[0][0]                 
====================================================================================================
Total params: 423305
____________________________________________________________________________________________________
None

In [41]:
from keras.callbacks import ModelCheckpoint

# To save the best model
# serialize model to JSON
model_json = model.to_json()
with open("model--7mlp--lb.json", "w") as json_file:
    json_file.write(model_json)
filepath="weights--7mlp--lb.best.hdf5"
# Monitor validation accuracy, and store the weights to file whenever it reaches a new maximum
checkpoint = ModelCheckpoint(filepath, monitor='val_acc', verbose=1, save_best_only=True, mode='max')
callbacks_list = [checkpoint]

# Fit the model
history = model.fit(X_train_lb, Y_train_lb, validation_data=(X_test_lb,Y_test_lb), 
                    nb_epoch=n_epochs, batch_size=10, verbose=0, callbacks=callbacks_list)
#results = cross_val_score(pipeline, X_train, dummy_y_train, cv=kfold)
#print("Standardized data Acc (in CV training data): %.2f%% (%.2f%%)" % (results.mean()*100, results.std()*100))
# evaluate the model
#scores = pipeline.evaluate(X_test, dummy_y_test)
#print pipeline.metrics_names[1]
#print scores[1]*100
# For other metrics, see http://machinelearningmastery.com/metrics-evaluate-machine-learning-algorithms-python/
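
# Note: as the training log below shows, val_acc improves only sporadically
# after the first ~50 epochs. In a re-run, an EarlyStopping callback would be
# a cheap addition (a sketch, Keras 1.x API, not used in this run):
#from keras.callbacks import EarlyStopping
#early_stop = EarlyStopping(monitor='val_acc', patience=100, mode='max', verbose=1)
#callbacks_list = [checkpoint, early_stop]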


Epoch 00000: val_acc improved from -inf to 0.47661, saving model to weights--7mlp--lb.best.hdf5
Epoch 00001: val_acc improved from 0.47661 to 0.50487, saving model to weights--7mlp--lb.best.hdf5
Epoch 00002: val_acc improved from 0.50487 to 0.50877, saving model to weights--7mlp--lb.best.hdf5
Epoch 00007: val_acc improved from 0.50877 to 0.51072, saving model to weights--7mlp--lb.best.hdf5
Epoch 00012: val_acc improved from 0.51072 to 0.54191, saving model to weights--7mlp--lb.best.hdf5
Epoch 00026: val_acc improved from 0.54191 to 0.55750, saving model to weights--7mlp--lb.best.hdf5
Epoch 00035: val_acc improved from 0.55750 to 0.56238, saving model to weights--7mlp--lb.best.hdf5
Epoch 00050: val_acc improved from 0.56238 to 0.57895, saving model to weights--7mlp--lb.best.hdf5
Epoch 00358: val_acc improved from 0.57895 to 0.58187, saving model to weights--7mlp--lb.best.hdf5
Epoch 00374: val_acc improved from 0.58187 to 0.58285, saving model to weights--7mlp--lb.best.hdf5
Epoch 00407: val_acc improved from 0.58285 to 0.58382, saving model to weights--7mlp--lb.best.hdf5
Epoch 00422: val_acc improved from 0.58382 to 0.58674, saving model to weights--7mlp--lb.best.hdf5
Epoch 00562: val_acc improved from 0.58674 to 0.58772, saving model to weights--7mlp--lb.best.hdf5
Epoch 00570: val_acc improved from 0.58772 to 0.59357, saving model to weights--7mlp--lb.best.hdf5
Epoch 00763: val_acc improved from 0.59357 to 0.59357, saving model to weights--7mlp--lb.best.hdf5
Epoch 00765: val_acc improved from 0.59357 to 0.59942, saving model to weights--7mlp--lb.best.hdf5
Epoch 00934: val_acc improved from 0.59942 to 0.60136, saving model to weights--7mlp--lb.best.hdf5
(all other epochs: val_acc did not improve)

In [42]:
import matplotlib.pyplot as plt
from sklearn.metrics import confusion_matrix, roc_auc_score, accuracy_score

# TODO: Reload the best model and weights from the previous step, before showing the evaluation
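# A possible way to do that (a sketch, Keras 1.x API; left commented out so the
# metrics below reflect the final-epoch weights that were actually evaluated):
#from keras.models import model_from_json
#with open("model--7mlp--lb.json") as json_file:
#    model = model_from_json(json_file.read())
#model.load_weights("weights--7mlp--lb.best.hdf5")
#model.compile(loss='categorical_crossentropy', optimizer='sgd', metrics=['accuracy'])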


# Other performance/accuracy metrics
Y_pred = model.predict(X_test_lb)
print Y_pred.shape

# Accuracy
print('Accuracy:')
print(accuracy_score(numpy.argmax(Y_test_lb, axis=1), numpy.argmax(Y_pred, axis=1)))


# Confusion matrix
cm = confusion_matrix(numpy.argmax(Y_test_lb, axis=1), numpy.argmax(Y_pred, axis=1))
numpy.set_printoptions(precision=2)
print('Confusion matrix:')
print(cm)

# AUC
roc = roc_auc_score(Y_test_lb, Y_pred, average='macro')
print('AUC score:')
print(roc)
    

# summarize history for accuracy
plt.plot(history.history['acc'])
plt.plot(history.history['val_acc'])
plt.title('model accuracy')
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.legend(['train','test'], loc='upper left')
plt.show()
# summarize history for loss
plt.plot(history.history['loss'])
plt.plot(history.history['val_loss'])
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train','test'], loc='upper left')
plt.show()


(1026, 5)
Accuracy:
0.598440545809
Confusion matrix:
[[161   3  19  11   0]
 [  6 234  38  15  32]
 [ 57  80 136  25  27]
 [ 25   6  25  34   5]
 [  5  24   5   4  49]]
AUC score:
0.816970897963
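
For per-class detail beyond the aggregate accuracy and AUC, sklearn's classification_report could complement the confusion matrix above (a sketch, reusing Y_test_lb and Y_pred from the evaluation cell):

# Per-class precision/recall/F1 for the look-back MLP predictions
from sklearn.metrics import classification_report
print classification_report(numpy.argmax(Y_test_lb, axis=1),
                            numpy.argmax(Y_pred, axis=1))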

So, the look-back MLP approach (on 100-component PCA features) seems to work a bit better (and beats the raw random forest), but not by much.

Predicting Social Plane with PCA-MLP and a Look-back

... similar to the previous one.


In [45]:
X_train = train[:,3:7558].astype(float)
#X_train = train[:,3:13].astype(float)
Y_train = train[:,7559]
X_test = test[:,3:7558].astype(float)
#X_test = test[:,3:13].astype(float)
Y_test = test[:,7559]

# One-hot encoding of the response variable (using dummy variables)
from keras.utils.np_utils import to_categorical

# encode class values as integers
encoder = LabelEncoder()
encoder.fit(Y_train)
encoded_Y_train = encoder.transform(Y_train)
# convert integers to dummy variables (i.e., one-hot encoded)
dummy_y_train = to_categorical(encoded_Y_train)
# NB: as before, re-fitting on Y_test assumes train and test share the same label set
encoder.fit(Y_test)
encoded_Y_test = encoder.transform(Y_test)
dummy_y_test = to_categorical(encoded_Y_test)

# Sanity check on matrix dimensions, after dropping null/nans
#print X_train.shape #(4472, 7555)
#print Y_test.shape #(1044, )
#print dummy_y_test.shape # (1044, 4) -- the social plane has 4 classes

In [46]:
# We standardize on the basis of the training data
scaler = StandardScaler().fit(X_train)
X_train_st = scaler.transform(X_train)
X_test_st = scaler.transform(X_test)

# We do PCA, too
from sklearn import decomposition
n_comp = 100
pca = decomposition.PCA(n_components=n_comp)
X_train_pca = pca.fit_transform(X_train_st)
X_test_pca = pca.transform(X_test_st)

print 'Variance explained:'
print pca.explained_variance_ratio_
print 'Total variance explained by '+str(n_comp)+' components:'
print sum(pca.explained_variance_ratio_)


Variance explained:
[ 0.2   0.08  0.04  0.02  0.02  0.02  0.01  0.01  0.01  0.01  0.01  0.01
  0.01  0.01  0.01  0.01  0.01  0.01  0.    0.    0.    0.    0.    0.    0.
  0.    0.    0.    0.    0.    0.    0.    0.    0.    0.    0.    0.    0.
  0.    0.    0.    0.    0.    0.    0.    0.    0.    0.    0.    0.    0.
  0.    0.    0.    0.    0.    0.    0.    0.    0.    0.    0.    0.    0.
  0.    0.    0.    0.    0.    0.    0.    0.    0.    0.    0.    0.    0.
  0.    0.    0.    0.    0.    0.    0.    0.    0.    0.    0.    0.    0.
  0.    0.    0.    0.    0.    0.    0.    0.    0.    0.  ]
Total variance explained by 100 components:
0.630815671837

In [47]:
# We create the look-back dataset, reusing the create_dataset() helper defined above

# build rows X = [t, t-1, ..., t-look_back], with Y = the label at time t
lookbk = 9
X_train_lb, Y_train_lb = create_dataset(train[:,1:3], X_train_pca, dummy_y_train, lookbk)
X_test_lb, Y_test_lb = create_dataset(test[:,1:3], X_test_pca, dummy_y_test, lookbk)
print X_train_lb.shape, Y_test_lb.shape  # quick shape check: training X vs. test Y


case1-day1-session2-teacher1
case1-day1-session3-teacher1
case1-day1-session4-teacher1
case2-day1-session1-teacher2
case2-day1-session2-teacher2
case2-day2-session1-teacher2
case2-day2-session2-teacher2
case2-day3-session2-teacher2
case2-day4-session1-teacher2
case2-day4-session2-teacher2
case1-day1-session1-teacher1
case2-day3-session1-teacher2
(4382, 1000) (1026, 4)
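
To make the roll-and-stack trick inside create_dataset concrete: each output row concatenates the features at time t with those at t-1, ..., t-look_back, and the first look_back rows are dropped because numpy.roll wraps around. A toy sketch on hypothetical data:

In [ ]:
import numpy

# Toy data (hypothetical values): 4 time steps, 2 features
X = numpy.array([[1, 10],
                 [2, 20],
                 [3, 30],
                 [4, 40]], dtype=float)
look_back = 1
stacked = X
for i in range(1, look_back + 1):
    # rolling by i shifts rows down, so row t now holds the features of t-i
    stacked = numpy.hstack((stacked, numpy.roll(X, i, axis=0)))
# drop the first look_back rows, whose rolled part wrapped around
print(stacked[look_back:, :])
# [[  2.  20.   1.  10.]
#  [  3.  30.   2.  20.]
#  [  4.  40.   3.  30.]]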

In [50]:
from keras.layers import Dropout
from keras.constraints import maxnorm
from keras.optimizers import SGD

# Apply dropout regularization, since the model was overfitting
def create_deeper_dropout_decay_PCA(n_components, lookback, learningrate, n_epochs):
    # create model
    model = Sequential()
    model.add(Dropout(0.2, input_shape=(n_components*(lookback+1),)))
    model.add(Dense(300, init='uniform', activation='tanh'))
    model.add(Dropout(0.2))
    model.add(Dense(300, init='uniform', activation='tanh'))
    model.add(Dropout(0.2))
    model.add(Dense(80, init='uniform', activation='tanh'))
    model.add(Dropout(0.2))
    model.add(Dense(80, init='uniform', activation='tanh'))
    model.add(Dropout(0.2))
    model.add(Dense(20, init='uniform', activation='tanh'))
    model.add(Dropout(0.2))
    model.add(Dense(20, init='uniform', activation='tanh'))
    model.add(Dropout(0.2))
    # NB: softmax would be the conventional output activation for mutually
    # exclusive classes; this run used a sigmoid output
    model.add(Dense(4, init='uniform', activation='sigmoid'))
    # Compile the model with a larger learning rate and momentum, as
    # recommended by the original dropout paper
    sgd = SGD(lr=learningrate, momentum=0.8, decay=learningrate/n_epochs, nesterov=False)
    model.compile(loss='categorical_crossentropy', optimizer=sgd, metrics=['accuracy'])
    return model

# Build the deeper dropout model for the look-back PCA features
numpy.random.seed(seed)
#estimators = []
#estimators.append(('standardize', StandardScaler()))
#estimators.append(('mlp', KerasClassifier(build_fn=create_baseline, nb_epoch=10, batch_size=10, verbose=1)))
# We define a pipeline of estimators, in which first the scaler is fitted to the data, then the MLP is applied
#pipeline = Pipeline(estimators)
#kfold = StratifiedKFold(y=Y_train, n_folds=3, shuffle=True, random_state=seed)
#model = create_baseline()
learningrate = 0.1
n_epochs = 20
model = create_deeper_dropout_decay_PCA(n_components=n_comp, lookback=lookbk, learningrate=learningrate, n_epochs=n_epochs)
print model.summary()


____________________________________________________________________________________________________
Layer (type)                     Output Shape          Param #     Connected to                     
====================================================================================================
dropout_36 (Dropout)             (None, 1000)          0           dropout_input_6[0][0]            
____________________________________________________________________________________________________
dense_36 (Dense)                 (None, 300)           300300      dropout_36[0][0]                 
____________________________________________________________________________________________________
dropout_37 (Dropout)             (None, 300)           0           dense_36[0][0]                   
____________________________________________________________________________________________________
dense_37 (Dense)                 (None, 300)           90300       dropout_37[0][0]                 
____________________________________________________________________________________________________
dropout_38 (Dropout)             (None, 300)           0           dense_37[0][0]                   
____________________________________________________________________________________________________
dense_38 (Dense)                 (None, 80)            24080       dropout_38[0][0]                 
____________________________________________________________________________________________________
dropout_39 (Dropout)             (None, 80)            0           dense_38[0][0]                   
____________________________________________________________________________________________________
dense_39 (Dense)                 (None, 80)            6480        dropout_39[0][0]                 
____________________________________________________________________________________________________
dropout_40 (Dropout)             (None, 80)            0           dense_39[0][0]                   
____________________________________________________________________________________________________
dense_40 (Dense)                 (None, 20)            1620        dropout_40[0][0]                 
____________________________________________________________________________________________________
dropout_41 (Dropout)             (None, 20)            0           dense_40[0][0]                   
____________________________________________________________________________________________________
dense_41 (Dense)                 (None, 20)            420         dropout_41[0][0]                 
____________________________________________________________________________________________________
dropout_42 (Dropout)             (None, 20)            0           dense_41[0][0]                   
____________________________________________________________________________________________________
dense_42 (Dense)                 (None, 4)             84          dropout_42[0][0]                 
====================================================================================================
Total params: 423284
____________________________________________________________________________________________________
None
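
For reference, the decay passed to SGD is applied per gradient update (batch), not per epoch: Keras scales the rate as lr / (1 + decay * t), with t the number of updates so far. A quick sketch of the schedule implied by the settings above:

In [ ]:
# lr_t = lr / (1 + decay * t), with t counting gradient updates (batches)
lr = 0.1
n_epochs = 20
decay = lr / n_epochs  # 0.005, as in create_deeper_dropout_decay_PCA
for t in [0, 100, 1000, 8000]:
    print('after %5d updates: lr = %.4f' % (t, lr / (1 + decay * t)))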

In [51]:
from keras.callbacks import ModelCheckpoint

# To save the best model
# serialize model to JSON
model_json = model.to_json()
with open("social.model--7mlp--lb.json", "w") as json_file:
    json_file.write(model_json)
filepath="social.weights--7mlp--lb.best.hdf5"
# Monitor accuracy on the held-out set passed as validation_data, and save the weights whenever it reaches a new maximum
checkpoint = ModelCheckpoint(filepath, monitor='val_acc', verbose=1, save_best_only=True, mode='max')
callbacks_list = [checkpoint]

# Fit the model
history = model.fit(X_train_lb, Y_train_lb, validation_data=(X_test_lb,Y_test_lb), 
                    nb_epoch=n_epochs, batch_size=10, verbose=0, callbacks=callbacks_list)
#results = cross_val_score(pipeline, X_train, dummy_y_train, cv=kfold)
#print("Standardized data Acc (in CV training data): %.2f%% (%.2f%%)" % (results.mean()*100, results.std()*100))
# evaluate the model
#scores = pipeline.evaluate(X_test, dummy_y_test)
#print pipeline.metrics_names[1]
#print scores[1]*100
# For other metrics, see http://machinelearningmastery.com/metrics-evaluate-machine-learning-algorithms-python/


Epoch 00000: val_acc improved from -inf to 0.62183, saving model to social.weights--7mlp--lb.best.hdf5
Epoch 00001: val_acc did not improve
Epoch 00002: val_acc did not improve
Epoch 00003: val_acc did not improve
Epoch 00004: val_acc did not improve
Epoch 00005: val_acc did not improve
Epoch 00006: val_acc improved from 0.62183 to 0.62378, saving model to social.weights--7mlp--lb.best.hdf5
Epoch 00007: val_acc improved from 0.62378 to 0.62768, saving model to social.weights--7mlp--lb.best.hdf5
Epoch 00008: val_acc improved from 0.62768 to 0.63743, saving model to social.weights--7mlp--lb.best.hdf5
Epoch 00009: val_acc improved from 0.63743 to 0.63840, saving model to social.weights--7mlp--lb.best.hdf5
Epoch 00010: val_acc improved from 0.63840 to 0.65497, saving model to social.weights--7mlp--lb.best.hdf5
Epoch 00011: val_acc did not improve
Epoch 00012: val_acc did not improve
Epoch 00013: val_acc did not improve
Epoch 00014: val_acc improved from 0.65497 to 0.67544, saving model to social.weights--7mlp--lb.best.hdf5
Epoch 00015: val_acc did not improve
Epoch 00016: val_acc did not improve
Epoch 00017: val_acc did not improve
Epoch 00018: val_acc did not improve
Epoch 00019: val_acc did not improve
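
The TODO in the next cell (reloading the checkpointed best model before evaluating) could be handled with something like this sketch, which rebuilds the architecture from the JSON file and loads the best weights saved above:

In [ ]:
from keras.models import model_from_json

# Rebuild the architecture from the saved JSON
with open("social.model--7mlp--lb.json", "r") as json_file:
    best_model = model_from_json(json_file.read())
# Load the best weights found during training
best_model.load_weights("social.weights--7mlp--lb.best.hdf5")
# Recompile before evaluating
best_model.compile(loss='categorical_crossentropy', optimizer='sgd',
                   metrics=['accuracy'])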

In [52]:
import matplotlib.pyplot as plt
from sklearn.metrics import confusion_matrix, roc_auc_score, accuracy_score

# TODO: Reload the best model and weights from the previous step, before showing the evaluation


# Other performance/accuracy metrics
Y_pred = model.predict(X_test_lb)
print Y_pred.shape

# Accuracy
print('Accuracy:')
print(accuracy_score(numpy.argmax(Y_test_lb, axis=1), numpy.argmax(Y_pred, axis=1)))


# Confusion matrix
cm = confusion_matrix(numpy.argmax(Y_test_lb, axis=1), numpy.argmax(Y_pred, axis=1))
numpy.set_printoptions(precision=2)
print('Confusion matrix:')
print(cm)

# AUC
roc = roc_auc_score(Y_test_lb, Y_pred, average='macro')
print('AUC score:')
print(roc)
    

# summarize history for accuracy
plt.plot(history.history['acc'])
plt.plot(history.history['val_acc'])
plt.title('model accuracy')
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.legend(['train','test'], loc='upper left')
plt.show()
# summarize history for loss
plt.plot(history.history['loss'])
plt.plot(history.history['val_loss'])
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train','test'], loc='upper left')
plt.show()


(1026, 4)
Accuracy:
0.671539961014
Confusion matrix:
[[631   0   0   7]
 [217   0   0   2]
 [ 23   0   1   0]
 [ 70   0  18  57]]
AUC score:
0.643159327864
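
The ~67% accuracy is only a few points above the ~62% obtainable by always predicting the majority class, and the confusion matrix shows exactly that collapse: almost every row is assigned to the first class. A per-class breakdown makes this explicit; a minimal sketch using sklearn's classification_report, to be run after the cell above:

In [ ]:
import numpy
from sklearn.metrics import classification_report

# Per-class precision/recall exposes the majority-class collapse that
# the overall accuracy hides
print(classification_report(numpy.argmax(Y_test_lb, axis=1),
                            numpy.argmax(Y_pred, axis=1)))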

In [ ]: