In [3]:
import numpy as np
import pandas as pd
In [4]:
from keras.models import Sequential
from keras.layers import Dense
from keras.wrappers.scikit_learn import KerasClassifier
import tensorflow as tf  # needed below for tf.device()
Using TensorFlow backend.
In [41]:
from sklearn.model_selection import cross_val_score
from sklearn.preprocessing import LabelEncoder
from sklearn.model_selection import StratifiedKFold, StratifiedShuffleSplit
from sklearn.preprocessing import StandardScaler
from sklearn.pipeline import Pipeline
In [6]:
# fix random seed for reproducibility
seed = 7
np.random.seed(seed)
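A caveat on reproducibility: np.random.seed only fixes NumPy's RNG. With the TensorFlow backend, Keras also draws from TensorFlow's own RNG, so a fully reproducible run would seed that too. A minimal sketch, assuming the TF 1.x graph-level API (not in the original notebook):

# also seed the TensorFlow backend (TF 1.x API)
tf.set_random_seed(seed)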
In [7]:
X = pd.read_csv('./pts.csv', index_col=0)
X = X.drop(['system_id', 'status_id', 'time', 'matrix_id'], axis=1)
In [8]:
X_vals = X.values
X_vals.shape
Out[8]:
(871535, 79)
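The "Standardized Results" figure quoted in the comments further below relies on scaling the features first; that step only appears later inside the Pipeline, but as a standalone sketch it would look like this (hypothetical X_std, reusing the StandardScaler imported above):

# scale each of the 79 features to zero mean and unit variance
scaler = StandardScaler()
X_std = scaler.fit_transform(X_vals)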
In [47]:
y = pd.read_csv('./labels.csv', index_col=0).values  # .values replaces the deprecated .as_matrix()
lbl_error = y[:,1]
lbl_error
Out[47]:
array([1, 1, 0, ..., 1, 1, 1], dtype=int64)
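The fold accuracies reported further down plateau near 0.795 almost immediately, so it is worth checking the class balance up front; a quick sketch (not in the original notebook):

# count the 0/1 labels; if one class makes up ~79.5% of the rows,
# an accuracy of ~0.795 is no better than always predicting that class
np.bincount(lbl_error.astype(np.int64))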
In [53]:
# baseline model
def create_baseline():
    # single hidden layer sized to the 79 input features
    model = Sequential()
    model.add(Dense(79, input_dim=79, kernel_initializer='normal', activation='relu'))
    model.add(Dense(1, kernel_initializer='normal', activation='sigmoid'))
    # compile with log-loss for binary classification
    model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['accuracy'])
    return model
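A quick sanity check of the architecture before training (summary() is standard Keras; this call is not in the original notebook):

create_baseline().summary()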
In [54]:
# evaluate the baseline model on the raw (unstandardized) dataset
with tf.device('/gpu:0'):
    # Keras 2 renamed nb_epoch to epochs; the original nb_epoch=100 was dropped by
    # the scikit-learn wrapper, so training ran the default 10 epochs (see log below)
    estimator = KerasClassifier(build_fn=create_baseline, epochs=10, batch_size=8192)
    kfold = StratifiedShuffleSplit(n_splits=10, random_state=seed)
    results = cross_val_score(estimator, X_vals, lbl_error, cv=kfold)
    print("Results: %.2f%% (%.2f%%)" % (results.mean()*100, results.std()*100))
# Original results:     49.28% (8.12%)
# Standardized results: 60.06% (0.11%)
Epoch 1/10
784381/784381 [==============================] - 2s - loss: 3.3643 - acc: 0.7912
Epoch 2/10
784381/784381 [==============================] - 1s - loss: 3.3035 - acc: 0.7950
Epoch 3/10
784381/784381 [==============================] - 1s - loss: 3.3035 - acc: 0.7950
Epoch 4/10
784381/784381 [==============================] - 1s - loss: 3.3035 - acc: 0.7950
Epoch 5/10
784381/784381 [==============================] - 1s - loss: 3.3035 - acc: 0.7950
Epoch 6/10
598016/784381 [=====================>........] - ETA: 0s - loss: 3.3062 - acc: 0.7949
---------------------------------------------------------------------------
KeyboardInterrupt                         Traceback (most recent call last)
<ipython-input-54-72dba4b8ebbf> in <module>()
      3 estimator = KerasClassifier(build_fn=create_baseline, nb_epoch=100, batch_size=8192)
      4 kfold = StratifiedShuffleSplit(n_splits=10, random_state=seed)
----> 5 results = cross_val_score(estimator, X_vals, lbl_error, cv=kfold)
      6 print("Results: %.2f%% (%.2f%%)" % (results.mean()*100, results.std()*100))

(... library frames elided: sklearn cross_val_score -> joblib -> KerasClassifier.fit -> keras model.fit batch loop ...)

KeyboardInterrupt:
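The run above was interrupted by hand. With ten folds of ~784k rows each, the per-batch progress bars also flood the notebook; one option (an assumption, not in the original) is to silence them through the wrapper, which forwards verbose to model.fit:

estimator = KerasClassifier(build_fn=create_baseline, epochs=10,
                            batch_size=8192, verbose=0)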
In [56]:
# standardize inside a Pipeline so the scaler is re-fit on each training fold
# (no test-fold leakage), then train the same baseline network
estimators = []
estimators.append(('standardize', StandardScaler()))
estimators.append(('mlp', KerasClassifier(build_fn=create_baseline, epochs=10, batch_size=8192)))
pipeline = Pipeline(estimators)
kfold = StratifiedKFold(n_splits=10, shuffle=True, random_state=seed)
results = cross_val_score(pipeline, X_vals, lbl_error, cv=kfold)
print("Standardized: %.2f%% (%.2f%%)" % (results.mean()*100, results.std()*100))
Epoch 1/10
784381/784381 [==============================] - 3s - loss: 0.5261 - acc: 0.7843
Epoch 2/10
784381/784381 [==============================] - 2s - loss: 0.4946 - acc: 0.7950
Epoch 3/10
784381/784381 [==============================] - 2s - loss: 0.4928 - acc: 0.7950
Epoch 4/10
784381/784381 [==============================] - 2s - loss: 0.4912 - acc: 0.7950
Epoch 5/10
784381/784381 [==============================] - 2s - loss: 0.4895 - acc: 0.7951
Epoch 6/10
784381/784381 [==============================] - 2s - loss: 0.4879 - acc: 0.7952
Epoch 7/10
784381/784381 [==============================] - 2s - loss: 0.4864 - acc: 0.7955
Epoch 8/10
784381/784381 [==============================] - 2s - loss: 0.4849 - acc: 0.7959
Epoch 9/10
784381/784381 [==============================] - 2s - loss: 0.4834 - acc: 0.7962
Epoch 10/10
784381/784381 [==============================] - 2s - loss: 0.4820 - acc: 0.7965
87154/87154 [==============================] - 1s

(... near-identical training logs for the remaining nine folds elided; every fold converges to loss ~0.48, acc ~0.796 by epoch 10 ...)

Standardized: 79.67% (0.03%)
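One caution when reading the 79.67%: on every fold, accuracy barely moves after epoch 2, which is a classic sign the network is tracking the majority class. A hedged comparison against a majority-class baseline (DummyClassifier is standard scikit-learn; this cell is not in the original notebook):

from sklearn.dummy import DummyClassifier

# always predicts the most frequent label; its CV accuracy equals the class prior
dummy = DummyClassifier(strategy='most_frequent')
dummy_results = cross_val_score(dummy, X_vals, lbl_error, cv=kfold)
print("Majority-class baseline: %.2f%%" % (dummy_results.mean()*100))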