In [1]:
import keras
from keras.models import Sequential
from keras.layers import Dense, Dropout, Activation, LSTM
from keras.optimizers import SGD, Adam
from keras.layers.normalization import BatchNormalization

from read_dataset import read_mfcc_with_train_test
from read_saved_models import loadMfccStanderdScaler


Using TensorFlow backend.

In [2]:
def getStanderizedData(data):
    data_shape = data.shape
    n = data_shape[0]
    reshaped_data = data.reshape(n, -1)
    saved_ss = loadMfccStanderdScaler()
    trasformed_data = saved_ss.transform(reshaped_data)
    ret_data = trasformed_data.reshape(data_shape)
    return ret_data

In [7]:
X_train, X_test, y_train, y_test = read_mfcc_with_train_test()

In [4]:
X_ss_train = getStanderizedData(X_train)
X_ss_test = getStanderizedData(X_test)

In [5]:
print(X_train[0][0][0])
print(X_ss_train[0][0][0])


-88.9449916743
0.516393903012

In [8]:
print(X_train.shape)
print(X_test.shape)
print(y_train.shape)
print(y_test.shape)
print(y_train[0])


(600, 1293, 30)
(400, 1293, 30)
(600, 10)
(400, 10)
[ 0.  0.  0.  0.  0.  0.  0.  0.  1.  0.]

In [9]:
model = Sequential()
model.add(LSTM(30, activation='relu', recurrent_activation='relu', recurrent_dropout=0.3, input_shape=(431, 5)))
model.add(BatchNormalization())
model.add(Dense(10, activation='softmax'))

adam = Adam(lr=0.01, decay=1e-6)
model.compile(loss='categorical_crossentropy', optimizer=adam, metrics=['accuracy'])

In [10]:
model.fit(X_ss_train, y_train, epochs=10, batch_size=1, verbose=0)


---------------------------------------------------------------------------
KeyboardInterrupt                         Traceback (most recent call last)
<ipython-input-10-d6465b88abfe> in <module>()
----> 1 model.fit(X_ss_train, y_train, epochs=10, batch_size=1, verbose=0)

/home/minato/.pyenv/versions/anaconda3-4.2.0/lib/python3.5/site-packages/keras/models.py in fit(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, **kwargs)
    868                               class_weight=class_weight,
    869                               sample_weight=sample_weight,
--> 870                               initial_epoch=initial_epoch)
    871 
    872     def evaluate(self, x, y, batch_size=32, verbose=1,

/home/minato/.pyenv/versions/anaconda3-4.2.0/lib/python3.5/site-packages/keras/engine/training.py in fit(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, **kwargs)
   1505                               val_f=val_f, val_ins=val_ins, shuffle=shuffle,
   1506                               callback_metrics=callback_metrics,
-> 1507                               initial_epoch=initial_epoch)
   1508 
   1509     def evaluate(self, x, y, batch_size=32, verbose=1, sample_weight=None):

/home/minato/.pyenv/versions/anaconda3-4.2.0/lib/python3.5/site-packages/keras/engine/training.py in _fit_loop(self, f, ins, out_labels, batch_size, epochs, verbose, callbacks, val_f, val_ins, shuffle, callback_metrics, initial_epoch)
   1154                 batch_logs['size'] = len(batch_ids)
   1155                 callbacks.on_batch_begin(batch_index, batch_logs)
-> 1156                 outs = f(ins_batch)
   1157                 if not isinstance(outs, list):
   1158                     outs = [outs]

/home/minato/.pyenv/versions/anaconda3-4.2.0/lib/python3.5/site-packages/keras/backend/tensorflow_backend.py in __call__(self, inputs)
   2267         updated = session.run(self.outputs + [self.updates_op],
   2268                               feed_dict=feed_dict,
-> 2269                               **self.session_kwargs)
   2270         return updated[:len(self.outputs)]
   2271 

/home/minato/.pyenv/versions/anaconda3-4.2.0/lib/python3.5/site-packages/tensorflow/python/client/session.py in run(self, fetches, feed_dict, options, run_metadata)
    776     try:
    777       result = self._run(None, fetches, feed_dict, options_ptr,
--> 778                          run_metadata_ptr)
    779       if run_metadata:
    780         proto_data = tf_session.TF_GetBuffer(run_metadata_ptr)

/home/minato/.pyenv/versions/anaconda3-4.2.0/lib/python3.5/site-packages/tensorflow/python/client/session.py in _run(self, handle, fetches, feed_dict, options, run_metadata)
    980     if final_fetches or final_targets:
    981       results = self._do_run(handle, final_targets, final_fetches,
--> 982                              feed_dict_string, options, run_metadata)
    983     else:
    984       results = []

/home/minato/.pyenv/versions/anaconda3-4.2.0/lib/python3.5/site-packages/tensorflow/python/client/session.py in _do_run(self, handle, target_list, fetch_list, feed_dict, options, run_metadata)
   1030     if handle is None:
   1031       return self._do_call(_run_fn, self._session, feed_dict, fetch_list,
-> 1032                            target_list, options, run_metadata)
   1033     else:
   1034       return self._do_call(_prun_fn, self._session, handle, feed_dict,

/home/minato/.pyenv/versions/anaconda3-4.2.0/lib/python3.5/site-packages/tensorflow/python/client/session.py in _do_call(self, fn, *args)
   1037   def _do_call(self, fn, *args):
   1038     try:
-> 1039       return fn(*args)
   1040     except errors.OpError as e:
   1041       message = compat.as_text(e.message)

/home/minato/.pyenv/versions/anaconda3-4.2.0/lib/python3.5/site-packages/tensorflow/python/client/session.py in _run_fn(session, feed_dict, fetch_list, target_list, options, run_metadata)
   1019         return tf_session.TF_Run(session, options,
   1020                                  feed_dict, fetch_list, target_list,
-> 1021                                  status, run_metadata)
   1022 
   1023     def _prun_fn(session, handle, feed_dict, fetch_list):

KeyboardInterrupt: 

In [12]:
score = model.evaluate(X_ss_test, y_test, batch_size=100)
print("")
print(model.metrics_names)
print(score)


400/400 [==============================] - 7s     

['loss', 'acc']
[nan, 0.097499998286366463]

In [ ]:
model_filepath = "./savedModels/mfcc_lstm_model.h5"
model.save(model_filepath)