In [1]:
import keras
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, LSTM
from keras.optimizers import SGD, Adam
from keras.layers import BatchNormalization

from read_dataset import read_mfcc_with_train_test
from read_saved_models import loadMfccStanderdScaler


Using TensorFlow backend.

In [2]:
def getStandardizedData(data):
    # Flatten each sample to 2-D, apply the saved StandardScaler,
    # then restore the original (n_samples, timesteps, features) shape.
    data_shape = data.shape
    n = data_shape[0]
    reshaped_data = data.reshape(n, -1)
    saved_ss = loadMfccStanderdScaler()
    transformed_data = saved_ss.transform(reshaped_data)
    ret_data = transformed_data.reshape(data_shape)
    return ret_data
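
The loadMfccStanderdScaler helper is assumed to return a scikit-learn
StandardScaler fit on the flattened training features. A minimal sketch of
how such a scaler might have been created and saved (the path and the fit
step are assumptions, not taken from read_saved_models):

from sklearn.preprocessing import StandardScaler
from sklearn.externals import joblib  # plain `import joblib` in newer sklearn

# Hypothetical: fit the scaler on the flattened training set and persist it.
ss = StandardScaler()
ss.fit(X_train.reshape(X_train.shape[0], -1))
joblib.dump(ss, "./savedModels/mfcc_standard_scaler.pkl")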

In [3]:
X_train, X_test, y_train, y_test = read_mfcc_with_train_test()
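
read_mfcc_with_train_test is assumed to return MFCC feature sequences plus
one-hot labels, already split into train and test sets. A hedged sketch of
how one clip could be turned into such a sequence with librosa (the filename
and n_mfcc=5 are assumptions inferred from the feature shapes printed below):

import librosa

# Hypothetical per-clip extraction: 5 MFCC coefficients per frame,
# transposed to (timesteps, features) as the LSTM input expects.
y, sr = librosa.load("clip.wav")
mfcc_seq = librosa.feature.mfcc(y=y, sr=sr, n_mfcc=5).T  # (n_frames, 5)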

In [4]:
X_ss_train = getStandardizedData(X_train)
X_ss_test = getStandardizedData(X_test)

In [5]:
print(X_train[0][0][0])
print(X_ss_train[0][0][0])


-88.9449916743
0.516393903012
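
As a quick check that the scaler behaves as expected, the standardized
training features should have roughly zero mean and unit variance per
feature (a hypothetical verification, assuming the scaler was fit on the
flattened training set):

import numpy as np

flat = X_ss_train.reshape(X_ss_train.shape[0], -1)
print(flat.mean(axis=0)[:3])  # per-feature means, expected near 0
print(flat.std(axis=0)[:3])   # per-feature stds, expected near 1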

In [6]:
print(X_train.shape)
print(X_test.shape)
print(y_train.shape)
print(y_test.shape)
print(y_train[0])


(600, 431, 5)
(400, 431, 5)
(600, 10)
(400, 10)
[ 0.  0.  0.  0.  0.  0.  0.  0.  1.  0.]
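
So the data consists of 600 training and 400 test clips, each a sequence of
431 frames with 5 MFCC coefficients per frame, and the labels are one-hot
vectors over 10 classes.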

In [7]:
model = Sequential()
# Note: recurrent_activation='relu' replaces the default bounded gate
# activation; with unbounded gates and lr=0.01 the loss can diverge,
# which is exactly what the evaluation below shows.
model.add(LSTM(30, activation='relu', recurrent_activation='relu', recurrent_dropout=0.3, input_shape=(431, 5)))
model.add(BatchNormalization())
model.add(Dense(10, activation='softmax'))

adam = Adam(lr=0.01, decay=1e-6)
model.compile(loss='categorical_crossentropy', optimizer=adam, metrics=['accuracy'])
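
With an unbounded relu on both the output and the recurrent gates, the LSTM
cell state can grow without limit, which, combined with the relatively high
learning rate, is a common recipe for a diverging loss. A sketch of a more
numerically stable configuration (this is not the model trained below; the
lower learning rate and clipnorm value are illustrative choices):

# Hypothetical alternative: the default gate activation keeps the cell
# state bounded; a lower learning rate plus clipnorm caps each update.
stable_model = Sequential()
stable_model.add(LSTM(30, activation='tanh', recurrent_dropout=0.3, input_shape=(431, 5)))
stable_model.add(BatchNormalization())
stable_model.add(Dense(10, activation='softmax'))
stable_adam = Adam(lr=0.001, decay=1e-6, clipnorm=1.0)
stable_model.compile(loss='categorical_crossentropy', optimizer=stable_adam, metrics=['accuracy'])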

In [8]:
# batch_size=1 trains on one clip per update; slow and noisy, but valid.
model.fit(X_ss_train, y_train, epochs=10, batch_size=1, verbose=0)


Out[8]:
<keras.callbacks.History at 0x7f60bda61c18>
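
With verbose=0, a diverging loss goes unnoticed until evaluation. A hedged
alternative fit call that surfaces the problem during training (TerminateOnNaN
is a standard Keras callback; the batch size and validation split here are
illustrative, not the settings used above):

from keras.callbacks import TerminateOnNaN

# Prints per-epoch metrics and stops as soon as the loss becomes NaN.
history = model.fit(X_ss_train, y_train, epochs=10, batch_size=32,
                    validation_split=0.1, verbose=1,
                    callbacks=[TerminateOnNaN()])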

In [9]:
score = model.evaluate(X_ss_test, y_test, batch_size=100)
print("")
print(model.metrics_names)
print(score)


400/400 [==============================] - 0s     

['loss', 'acc']
[nan, 0.097499998286366463]
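
The NaN loss confirms that training diverged, and the accuracy of roughly
0.0975 is chance level for 10 classes. This is consistent with the relu
recurrent activation noted above: once an activation overflows, the loss
becomes NaN and the network stops learning.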

In [10]:
model_filepath = "./savedModels/mfcc_lstm_model.h5"
model.save(model_filepath)
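
The saved HDF5 file can later be restored with keras.models.load_model,
which recovers the architecture, weights, and optimizer state:

from keras.models import load_model

# Restores the full model (architecture + weights + training config).
restored_model = load_model(model_filepath)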