In [81]:
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from matplotlib import cm
from mpl_toolkits.mplot3d import Axes3D
import seaborn as sns
import os
from sklearn.preprocessing import MinMaxScaler
from sklearn.decomposition import PCA
from sklearn.metrics import classification_report, confusion_matrix, precision_recall_curve, auc, roc_auc_score, roc_curve

Data observation


In [2]:
filepath = '/Users/mac/Desktop/Kaggle_datasets/Voice_Gender/'
filename01 = 'voice.csv'

df_full = pd.read_csv(os.path.join(filepath, filename01))
df_full.head()


Out[2]:
meanfreq sd median Q25 Q75 IQR skew kurt sp.ent sfm ... centroid meanfun minfun maxfun meandom mindom maxdom dfrange modindx label
0 0.059781 0.064241 0.032027 0.015071 0.090193 0.075122 12.863462 274.402906 0.893369 0.491918 ... 0.059781 0.084279 0.015702 0.275862 0.007812 0.007812 0.007812 0.000000 0.000000 male
1 0.066009 0.067310 0.040229 0.019414 0.092666 0.073252 22.423285 634.613855 0.892193 0.513724 ... 0.066009 0.107937 0.015826 0.250000 0.009014 0.007812 0.054688 0.046875 0.052632 male
2 0.077316 0.083829 0.036718 0.008701 0.131908 0.123207 30.757155 1024.927705 0.846389 0.478905 ... 0.077316 0.098706 0.015656 0.271186 0.007990 0.007812 0.015625 0.007812 0.046512 male
3 0.151228 0.072111 0.158011 0.096582 0.207955 0.111374 1.232831 4.177296 0.963322 0.727232 ... 0.151228 0.088965 0.017798 0.250000 0.201497 0.007812 0.562500 0.554688 0.247119 male
4 0.135120 0.079146 0.124656 0.078720 0.206045 0.127325 1.101174 4.333713 0.971955 0.783568 ... 0.135120 0.106398 0.016931 0.266667 0.712812 0.007812 5.484375 5.476562 0.208274 male

5 rows × 21 columns


In [3]:
df_full.info()


<class 'pandas.core.frame.DataFrame'>
RangeIndex: 3168 entries, 0 to 3167
Data columns (total 21 columns):
meanfreq    3168 non-null float64
sd          3168 non-null float64
median      3168 non-null float64
Q25         3168 non-null float64
Q75         3168 non-null float64
IQR         3168 non-null float64
skew        3168 non-null float64
kurt        3168 non-null float64
sp.ent      3168 non-null float64
sfm         3168 non-null float64
mode        3168 non-null float64
centroid    3168 non-null float64
meanfun     3168 non-null float64
minfun      3168 non-null float64
maxfun      3168 non-null float64
meandom     3168 non-null float64
mindom      3168 non-null float64
maxdom      3168 non-null float64
dfrange     3168 non-null float64
modindx     3168 non-null float64
label       3168 non-null object
dtypes: float64(20), object(1)
memory usage: 519.8+ KB

In [12]:
df_full.columns


Out[12]:
Index(['meanfreq', 'sd', 'median', 'Q25', 'Q75', 'IQR', 'skew', 'kurt',
       'sp.ent', 'sfm', 'mode', 'centroid', 'meanfun', 'minfun', 'maxfun',
       'meandom', 'mindom', 'maxdom', 'dfrange', 'modindx', 'label'],
      dtype='object')

In [7]:
sex_label = {'male':0,'female':1}
df_full['label'] = df_full['label'].map(sex_label)

In [8]:
df_full


Out[8]:
meanfreq sd median Q25 Q75 IQR skew kurt sp.ent sfm ... centroid meanfun minfun maxfun meandom mindom maxdom dfrange modindx label
0 0.059781 0.064241 0.032027 0.015071 0.090193 0.075122 12.863462 274.402906 0.893369 0.491918 ... 0.059781 0.084279 0.015702 0.275862 0.007812 0.007812 0.007812 0.000000 0.000000 0
1 0.066009 0.067310 0.040229 0.019414 0.092666 0.073252 22.423285 634.613855 0.892193 0.513724 ... 0.066009 0.107937 0.015826 0.250000 0.009014 0.007812 0.054688 0.046875 0.052632 0
2 0.077316 0.083829 0.036718 0.008701 0.131908 0.123207 30.757155 1024.927705 0.846389 0.478905 ... 0.077316 0.098706 0.015656 0.271186 0.007990 0.007812 0.015625 0.007812 0.046512 0
3 0.151228 0.072111 0.158011 0.096582 0.207955 0.111374 1.232831 4.177296 0.963322 0.727232 ... 0.151228 0.088965 0.017798 0.250000 0.201497 0.007812 0.562500 0.554688 0.247119 0
4 0.135120 0.079146 0.124656 0.078720 0.206045 0.127325 1.101174 4.333713 0.971955 0.783568 ... 0.135120 0.106398 0.016931 0.266667 0.712812 0.007812 5.484375 5.476562 0.208274 0
5 0.132786 0.079557 0.119090 0.067958 0.209592 0.141634 1.932562 8.308895 0.963181 0.738307 ... 0.132786 0.110132 0.017112 0.253968 0.298222 0.007812 2.726562 2.718750 0.125160 0
6 0.150762 0.074463 0.160106 0.092899 0.205718 0.112819 1.530643 5.987498 0.967573 0.762638 ... 0.150762 0.105945 0.026230 0.266667 0.479620 0.007812 5.312500 5.304688 0.123992 0
7 0.160514 0.076767 0.144337 0.110532 0.231962 0.121430 1.397156 4.766611 0.959255 0.719858 ... 0.160514 0.093052 0.017758 0.144144 0.301339 0.007812 0.539062 0.531250 0.283937 0
8 0.142239 0.078018 0.138587 0.088206 0.208587 0.120381 1.099746 4.070284 0.970723 0.770992 ... 0.142239 0.096729 0.017957 0.250000 0.336476 0.007812 2.164062 2.156250 0.148272 0
9 0.134329 0.080350 0.121451 0.075580 0.201957 0.126377 1.190368 4.787310 0.975246 0.804505 ... 0.134329 0.105881 0.019300 0.262295 0.340365 0.015625 4.695312 4.679688 0.089920 0
10 0.157021 0.071943 0.168160 0.101430 0.216740 0.115310 0.979442 3.974223 0.965249 0.733693 ... 0.157021 0.088894 0.022069 0.117647 0.460227 0.007812 2.812500 2.804688 0.200000 0
11 0.138551 0.077054 0.127527 0.087314 0.202739 0.115426 1.626770 6.291365 0.966004 0.752042 ... 0.138551 0.104199 0.019139 0.262295 0.246094 0.007812 2.718750 2.710938 0.132351 0
12 0.137343 0.080877 0.124263 0.083145 0.209227 0.126082 1.378728 5.008952 0.963514 0.736150 ... 0.137343 0.092644 0.016789 0.213333 0.481671 0.015625 5.015625 5.000000 0.088500 0
13 0.181225 0.060042 0.190953 0.128839 0.229532 0.100693 1.369430 5.475600 0.937446 0.537080 ... 0.181225 0.131504 0.025000 0.275862 1.277114 0.007812 2.804688 2.796875 0.416550 0
14 0.183115 0.066982 0.191233 0.129149 0.240152 0.111004 3.568104 35.384748 0.940333 0.571394 ... 0.183115 0.102799 0.020833 0.275862 1.245739 0.203125 6.742188 6.539062 0.139332 0
15 0.174272 0.069411 0.190874 0.115602 0.228279 0.112677 4.485038 61.764908 0.950972 0.635199 ... 0.174272 0.102046 0.018328 0.246154 1.621299 0.007812 7.000000 6.992188 0.209311 0
16 0.190846 0.065790 0.207951 0.132280 0.244357 0.112076 1.562304 7.834350 0.938546 0.538810 ... 0.190846 0.113323 0.017544 0.275862 1.434115 0.007812 6.320312 6.312500 0.254780 0
17 0.171247 0.074872 0.152807 0.122391 0.243617 0.121227 3.207170 25.765565 0.936954 0.586420 ... 0.171247 0.079718 0.015671 0.262295 0.106279 0.007812 0.570312 0.562500 0.138355 0
18 0.168346 0.074121 0.145618 0.115756 0.239824 0.124068 2.704335 18.484703 0.934523 0.559742 ... 0.168346 0.083484 0.015717 0.231884 0.146563 0.007812 3.125000 3.117188 0.059537 0
19 0.173631 0.073352 0.153569 0.123680 0.244234 0.120554 2.804975 20.857543 0.930917 0.518269 ... 0.173631 0.090130 0.015702 0.210526 0.193044 0.007812 2.820312 2.812500 0.068124 0
20 0.172754 0.076903 0.177736 0.120070 0.245368 0.125298 2.967765 20.078115 0.925539 0.523081 ... 0.172754 0.093574 0.015764 0.200000 0.235877 0.007812 0.718750 0.710938 0.235069 0
21 0.181015 0.074369 0.169299 0.128673 0.254175 0.125502 2.587325 12.281432 0.915284 0.475317 ... 0.181015 0.098643 0.016145 0.275862 0.209844 0.007812 3.695312 3.687500 0.059940 0
22 0.163536 0.072449 0.145543 0.113930 0.227449 0.113519 3.587650 28.653781 0.927015 0.542422 ... 0.163536 0.062542 0.015686 0.197531 0.059622 0.007812 0.445312 0.437500 0.091699 0
23 0.170213 0.075105 0.146053 0.123989 0.250126 0.126137 2.816793 13.764582 0.913832 0.487966 ... 0.170213 0.077698 0.015702 0.192771 0.101562 0.007812 0.562500 0.554688 0.161791 0
24 0.160422 0.076615 0.144824 0.120924 0.237244 0.116319 6.253208 85.491926 0.933030 0.567424 ... 0.160422 0.098944 0.016097 0.275862 0.206756 0.007812 3.953125 3.945312 0.073890 0
25 0.164700 0.075362 0.147018 0.118698 0.240475 0.121777 4.208608 43.681885 0.940669 0.604020 ... 0.164700 0.082963 0.015640 0.253968 0.143353 0.007812 1.062500 1.054688 0.125926 0
26 0.169579 0.075635 0.186468 0.116706 0.238549 0.121843 4.269923 45.895248 0.929498 0.543709 ... 0.169579 0.082451 0.016211 0.271186 0.148438 0.007812 3.609375 3.601562 0.050841 0
27 0.169021 0.071778 0.143168 0.125801 0.248315 0.122515 3.079273 14.340299 0.902275 0.477746 ... 0.169021 0.130598 0.015842 0.225352 0.335313 0.007812 0.710938 0.703125 0.397354 0
28 0.167340 0.072841 0.141739 0.122174 0.240000 0.117826 2.192126 8.152410 0.913763 0.539479 ... 0.167340 0.120052 0.016244 0.262295 0.298678 0.007812 0.679688 0.671875 0.384778 0
29 0.180528 0.070867 0.142385 0.129541 0.252477 0.122936 2.799969 12.190361 0.853115 0.313426 ... 0.180528 0.126607 0.017039 0.177778 0.234863 0.007812 0.507812 0.500000 0.329241 0
... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
3138 0.114477 0.081973 0.090199 0.041095 0.199900 0.158806 1.103680 3.759184 0.954130 0.689347 ... 0.114477 0.189394 0.016343 0.271186 0.566840 0.007812 4.273438 4.265625 0.183258 1
3139 0.112769 0.074424 0.094248 0.049183 0.183235 0.134052 0.945953 3.290904 0.965719 0.742464 ... 0.112769 0.169836 0.017917 0.262295 0.877604 0.007812 4.937500 4.929688 0.171708 1
3140 0.126439 0.079412 0.127325 0.046889 0.198993 0.152103 1.452173 5.582106 0.971946 0.790175 ... 0.126439 0.179613 0.029575 0.266667 0.614110 0.007812 4.914062 4.906250 0.090045 1
3141 0.117350 0.090035 0.109478 0.024017 0.203946 0.179929 2.610623 12.442898 0.953624 0.701434 ... 0.117350 0.178040 0.016512 0.271186 0.188721 0.007812 0.750000 0.742188 0.277759 1
3142 0.104793 0.085201 0.077886 0.028388 0.186101 0.157712 2.419127 11.281968 0.956977 0.718462 ... 0.104793 0.183565 0.016444 0.275862 0.297953 0.007812 0.859375 0.851562 0.370904 1
3143 0.127633 0.084931 0.158892 0.034531 0.201430 0.166899 1.591174 5.347645 0.949757 0.671247 ... 0.127633 0.178363 0.058182 0.271186 0.634014 0.015625 5.031250 5.015625 0.121246 1
3144 0.091250 0.086956 0.048191 0.015193 0.179043 0.163851 3.089787 12.857558 0.930715 0.622816 ... 0.091250 0.170893 0.016178 0.275862 0.767314 0.007812 5.289062 5.281250 0.187912 1
3145 0.082404 0.085136 0.035114 0.016920 0.152827 0.135906 2.570944 9.179264 0.921649 0.576089 ... 0.082404 0.183387 0.034043 0.275862 0.328962 0.007812 0.750000 0.742188 0.445053 1
3146 0.124695 0.080989 0.131882 0.042033 0.197268 0.155234 1.970756 8.000504 0.958531 0.721682 ... 0.124695 0.182513 0.068966 0.238806 0.293527 0.007812 0.851562 0.843750 0.396091 1
3147 0.131566 0.084354 0.131889 0.053093 0.196147 0.143055 2.243370 11.544740 0.968324 0.784108 ... 0.131566 0.191163 0.029144 0.275862 0.214725 0.007812 0.796875 0.789062 0.351645 1
3148 0.108888 0.092021 0.070063 0.022520 0.201180 0.178660 2.235435 8.528681 0.947621 0.679795 ... 0.108888 0.160473 0.019512 0.275862 0.497721 0.007812 2.945312 2.937500 0.236240 1
3149 0.090445 0.079045 0.059358 0.020893 0.167727 0.146834 2.187161 8.221164 0.942404 0.628992 ... 0.090445 0.182431 0.026622 0.258065 0.735453 0.007812 5.531250 5.523438 0.170489 1
3150 0.137507 0.091521 0.161298 0.043547 0.221260 0.177713 1.119608 4.185207 0.967706 0.739008 ... 0.137507 0.190093 0.019116 0.275862 0.354367 0.007812 3.117188 3.109375 0.096069 1
3151 0.113148 0.090335 0.084335 0.026622 0.198830 0.172207 2.258273 9.579337 0.957433 0.717683 ... 0.113148 0.187444 0.023495 0.262295 0.622489 0.007812 4.898438 4.890625 0.128717 1
3152 0.149731 0.082852 0.180932 0.060212 0.219788 0.159576 1.240037 4.019385 0.949787 0.652936 ... 0.149731 0.183974 0.051948 0.253968 1.361213 0.203125 6.031250 5.828125 0.365700 1
3153 0.189614 0.035933 0.194116 0.168434 0.205289 0.036855 2.724415 10.986864 0.871215 0.236684 ... 0.189614 0.163059 0.029685 0.258065 1.370192 0.164062 7.000000 6.835938 0.235948 1
3154 0.200097 0.045533 0.203796 0.176581 0.232133 0.055552 1.160197 3.733815 0.919607 0.357144 ... 0.200097 0.168531 0.063241 0.262295 0.718750 0.148438 7.000000 6.851562 0.092208 1
3155 0.178573 0.046679 0.164388 0.149309 0.204601 0.055293 3.066668 15.684088 0.891448 0.321169 ... 0.178573 0.155380 0.025478 0.253968 0.637921 0.148438 6.148438 6.000000 0.101291 1
3156 0.201806 0.036057 0.201622 0.178165 0.227872 0.049707 1.585353 4.945634 0.884731 0.227903 ... 0.201806 0.191704 0.032720 0.275862 0.593750 0.007812 5.921875 5.914062 0.124383 1
3157 0.203627 0.041529 0.204104 0.175661 0.239122 0.063461 1.462972 4.790370 0.903458 0.246953 ... 0.203627 0.146783 0.020566 0.262295 0.875558 0.171875 6.898438 6.726562 0.145534 1
3158 0.183667 0.040607 0.182534 0.156480 0.207646 0.051166 2.054138 7.483019 0.898138 0.313925 ... 0.183667 0.149237 0.018648 0.262295 0.550312 0.007812 3.421875 3.414062 0.166503 1
3159 0.168794 0.085842 0.188980 0.095558 0.240229 0.144671 1.462248 5.077956 0.956201 0.706861 ... 0.168794 0.182863 0.020699 0.271186 0.988281 0.007812 5.882812 5.875000 0.268617 1
3160 0.151771 0.089147 0.185970 0.058159 0.230199 0.172040 1.227710 4.304354 0.962045 0.744590 ... 0.151771 0.201600 0.023426 0.266667 0.766741 0.007812 4.007812 4.000000 0.192220 1
3161 0.170656 0.081237 0.184277 0.113012 0.239096 0.126084 1.378256 5.431663 0.950750 0.658558 ... 0.170656 0.198475 0.160000 0.253968 0.414062 0.007812 0.734375 0.726562 0.336918 1
3162 0.146023 0.092525 0.183434 0.041747 0.224337 0.182590 1.384981 5.118927 0.948999 0.659825 ... 0.146023 0.195640 0.039506 0.275862 0.533854 0.007812 2.992188 2.984375 0.258924 1
3163 0.131884 0.084734 0.153707 0.049285 0.201144 0.151859 1.762129 6.630383 0.962934 0.763182 ... 0.131884 0.182790 0.083770 0.262295 0.832899 0.007812 4.210938 4.203125 0.161929 1
3164 0.116221 0.089221 0.076758 0.042718 0.204911 0.162193 0.693730 2.503954 0.960716 0.709570 ... 0.116221 0.188980 0.034409 0.275862 0.909856 0.039062 3.679688 3.640625 0.277897 1
3165 0.142056 0.095798 0.183731 0.033424 0.224360 0.190936 1.876502 6.604509 0.946854 0.654196 ... 0.142056 0.209918 0.039506 0.275862 0.494271 0.007812 2.937500 2.929688 0.194759 1
3166 0.143659 0.090628 0.184976 0.043508 0.219943 0.176435 1.591065 5.388298 0.950436 0.675470 ... 0.143659 0.172375 0.034483 0.250000 0.791360 0.007812 3.593750 3.585938 0.311002 1
3167 0.165509 0.092884 0.183044 0.070072 0.250827 0.180756 1.705029 5.769115 0.938829 0.601529 ... 0.165509 0.185607 0.062257 0.271186 0.227022 0.007812 0.554688 0.546875 0.350000 1

3168 rows × 21 columns


In [9]:
df_full['label'].value_counts() # a very evenly balanced dataset: both classes are equally represented


Out[9]:
1    1584
0    1584
Name: label, dtype: int64

In [11]:
k = 10 #number of variables for heatmap
corrmat = df_full.corr()
cols = corrmat.nlargest(k, 'label')['label'].index
cm = np.corrcoef(df_full[cols].values.T)

plt.figure(figsize=(15,15)) # figure size can be adjusted here
sns.set(font_scale=1.25)
hm = sns.heatmap(cm, cbar=True, annot=True, square=True, fmt='.2f', annot_kws={'size': 10},
                 yticklabels = cols.values, xticklabels = cols.values, cmap='rainbow')
plt.show()
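The heatmap shows the ten features most correlated with label; if the raw numbers are wanted rather than colors, a small follow-up sketch (reusing the corrmat computed above, and ranking by absolute correlation) could be:

# rank features by absolute correlation with the (now numeric) label
label_corr = corrmat['label'].drop('label').abs().sort_values(ascending=False)
print(label_corr.head(10))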



In [15]:
cols = ['meanfreq', 'sd', 'median', 'Q25', 'Q75', 'IQR', 'skew', 'kurt',
       'sp.ent', 'sfm', 'mode', 'centroid', 'meanfun', 'minfun', 'maxfun',
       'meandom', 'mindom', 'maxdom', 'dfrange', 'modindx']

for col in cols:
    facet = sns.FacetGrid(df_full, hue='label', aspect=4, size=4)
    facet.map(sns.kdeplot, col, shade=True)
    facet.set()
    facet.add_legend()
    plt.show()


//anaconda/lib/python3.5/site-packages/matplotlib/pyplot.py:524: RuntimeWarning: More than 20 figures have been opened. Figures created through the pyplot interface (`matplotlib.pyplot.figure`) are retained until explicitly closed and may consume too much memory. (To control this warning, see the rcParam `figure.max_open_warning`).
  max_open_warning, RuntimeWarning)
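The warning above just means the loop leaves more than 20 figures open. Closing each figure after plt.show() keeps memory bounded; a minimal variant of the same loop (size is the older seaborn keyword, renamed height in later versions):

for col in cols:
    facet = sns.FacetGrid(df_full, hue='label', aspect=4, size=4)
    facet.map(sns.kdeplot, col, shade=True)
    facet.add_legend()
    plt.show()
    plt.close(facet.fig)  # release the figure so the open-figure count stays small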

Data preprocessing


In [17]:
from sklearn.utils import shuffle

shuffle_df = shuffle(df_full, random_state=42)

df_label = shuffle_df['label']
df_feature = shuffle_df.drop('label', axis=1)

cut_point = round(len(df_full)*0.6)
train_feature = np.array(df_feature.values[:cut_point,:])
train_label = np.array(df_label.values[:cut_point])
test_feature = np.array(df_feature.values[cut_point:,:])
test_label = np.array(df_label.values[cut_point:])

In [18]:
scaler = MinMaxScaler()
scaler.fit(train_feature)
train_feature_trans = scaler.transform(train_feature)
test_feature_trans = scaler.transform(test_feature)
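The manual shuffle-and-cut split plus MinMaxScaler above can also be written with scikit-learn's own split utility; an alternative sketch (assuming a scikit-learn version that provides sklearn.model_selection), where stratify keeps the 50/50 class balance in both parts:

from sklearn.model_selection import train_test_split

# 60% train / 40% test, stratified on the label
X_tr, X_te, y_tr, y_te = train_test_split(
    df_feature.values, df_label.values,
    test_size=0.4, random_state=42, stratify=df_label.values)

sc = MinMaxScaler().fit(X_tr)   # fit the scaler on training data only
X_tr_s, X_te_s = sc.transform(X_tr), sc.transform(X_te)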

Keras: MLP (works quite well)


In [19]:
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import Dropout

def show_train_history(train_history,train,validation):
    plt.plot(train_history.history[train])
    plt.plot(train_history.history[validation])
    plt.title('Train History')
    plt.ylabel(train)
    plt.xlabel('Epoch')
    plt.legend(['train', 'validation'], loc='best')
    plt.show()

model = Sequential() 
model.add(Dense(units=200, 
                input_dim=20, 
                kernel_initializer='uniform', 
                activation='relu'))
model.add(Dropout(0.5))

model.add(Dense(units=200,  
                kernel_initializer='uniform', 
                activation='relu'))
model.add(Dropout(0.5))

model.add(Dense(units=1, # single sigmoid output for binary classification
                kernel_initializer='uniform',
                activation='sigmoid'))

print(model.summary()) # shows the model architecture and the parameter counts

model.compile(loss='binary_crossentropy',   # binary classification uses binary_crossentropy
              optimizer='adam', metrics=['accuracy'])

train_history = model.fit(x=train_feature_trans, y=train_label,  # the extra validation split is built into Keras via validation_split
                          validation_split=0.8, epochs=300, 
                          batch_size=2000, verbose=1) # verbose=1 shows a progress bar for each epoch

show_train_history(train_history,'acc','val_acc')
show_train_history(train_history,'loss','val_loss')

scores = model.evaluate(test_feature_trans, test_label)
print('\n')
print('accuracy=',scores[1])

prediction = model.predict_classes(test_feature_trans)
#model.save_weights("Keras_VoiceGender_MLP.h5")
#print('model saved to disk')


Using TensorFlow backend.
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_1 (Dense)              (None, 200)               4200      
_________________________________________________________________
dropout_1 (Dropout)          (None, 200)               0         
_________________________________________________________________
dense_2 (Dense)              (None, 200)               40200     
_________________________________________________________________
dropout_2 (Dropout)          (None, 200)               0         
_________________________________________________________________
dense_3 (Dense)              (None, 1)                 201       
=================================================================
Total params: 44,601
Trainable params: 44,601
Non-trainable params: 0
_________________________________________________________________
None
Train on 380 samples, validate on 1521 samples
Epoch 1/300
380/380 [==============================] - 0s - loss: 0.6931 - acc: 0.5395 - val_loss: 0.6918 - val_acc: 0.5930
Epoch 2/300
380/380 [==============================] - 0s - loss: 0.6920 - acc: 0.5605 - val_loss: 0.6904 - val_acc: 0.6463
Epoch 3/300
380/380 [==============================] - 0s - loss: 0.6904 - acc: 0.6447 - val_loss: 0.6888 - val_acc: 0.6719
Epoch 4/300
380/380 [==============================] - 0s - loss: 0.6883 - acc: 0.7368 - val_loss: 0.6870 - val_acc: 0.6976
Epoch 5/300
380/380 [==============================] - 0s - loss: 0.6862 - acc: 0.7526 - val_loss: 0.6849 - val_acc: 0.7179
Epoch 6/300
380/380 [==============================] - 0s - loss: 0.6850 - acc: 0.7184 - val_loss: 0.6826 - val_acc: 0.7396
Epoch 7/300
380/380 [==============================] - 0s - loss: 0.6816 - acc: 0.7737 - val_loss: 0.6799 - val_acc: 0.7594
Epoch 8/300
380/380 [==============================] - 0s - loss: 0.6785 - acc: 0.7842 - val_loss: 0.6768 - val_acc: 0.7844
Epoch 9/300
380/380 [==============================] - 0s - loss: 0.6751 - acc: 0.7842 - val_loss: 0.6733 - val_acc: 0.8100
Epoch 10/300
380/380 [==============================] - 0s - loss: 0.6721 - acc: 0.7842 - val_loss: 0.6692 - val_acc: 0.8153
Epoch 11/300
380/380 [==============================] - 0s - loss: 0.6676 - acc: 0.8026 - val_loss: 0.6646 - val_acc: 0.8212
Epoch 12/300
380/380 [==============================] - 0s - loss: 0.6614 - acc: 0.7974 - val_loss: 0.6595 - val_acc: 0.8212
Epoch 13/300
380/380 [==============================] - 0s - loss: 0.6546 - acc: 0.8184 - val_loss: 0.6537 - val_acc: 0.8218
Epoch 14/300
380/380 [==============================] - 0s - loss: 0.6470 - acc: 0.8263 - val_loss: 0.6473 - val_acc: 0.8218
Epoch 15/300
380/380 [==============================] - 0s - loss: 0.6426 - acc: 0.8053 - val_loss: 0.6403 - val_acc: 0.8205
Epoch 16/300
380/380 [==============================] - 0s - loss: 0.6334 - acc: 0.8289 - val_loss: 0.6326 - val_acc: 0.8179
Epoch 17/300
380/380 [==============================] - 0s - loss: 0.6265 - acc: 0.8105 - val_loss: 0.6244 - val_acc: 0.8120
Epoch 18/300
380/380 [==============================] - 0s - loss: 0.6099 - acc: 0.8158 - val_loss: 0.6156 - val_acc: 0.8100
Epoch 19/300
380/380 [==============================] - 0s - loss: 0.6054 - acc: 0.8079 - val_loss: 0.6063 - val_acc: 0.8080
Epoch 20/300
380/380 [==============================] - 0s - loss: 0.5928 - acc: 0.8132 - val_loss: 0.5969 - val_acc: 0.8060
Epoch 21/300
380/380 [==============================] - 0s - loss: 0.5823 - acc: 0.8132 - val_loss: 0.5871 - val_acc: 0.7995
Epoch 22/300
380/380 [==============================] - 0s - loss: 0.5732 - acc: 0.8026 - val_loss: 0.5775 - val_acc: 0.8001
Epoch 23/300
380/380 [==============================] - 0s - loss: 0.5615 - acc: 0.8105 - val_loss: 0.5677 - val_acc: 0.8041
Epoch 24/300
380/380 [==============================] - 0s - loss: 0.5472 - acc: 0.8237 - val_loss: 0.5580 - val_acc: 0.8060
Epoch 25/300
380/380 [==============================] - 0s - loss: 0.5362 - acc: 0.8158 - val_loss: 0.5487 - val_acc: 0.8093
Epoch 26/300
380/380 [==============================] - 0s - loss: 0.5146 - acc: 0.8211 - val_loss: 0.5398 - val_acc: 0.8107
Epoch 27/300
380/380 [==============================] - 0s - loss: 0.5167 - acc: 0.8237 - val_loss: 0.5315 - val_acc: 0.8126
Epoch 28/300
380/380 [==============================] - 0s - loss: 0.5004 - acc: 0.8289 - val_loss: 0.5235 - val_acc: 0.8126
Epoch 29/300
380/380 [==============================] - 0s - loss: 0.4849 - acc: 0.8368 - val_loss: 0.5163 - val_acc: 0.8126
Epoch 30/300
380/380 [==============================] - 0s - loss: 0.4816 - acc: 0.8289 - val_loss: 0.5098 - val_acc: 0.8047
Epoch 31/300
380/380 [==============================] - 0s - loss: 0.4676 - acc: 0.8263 - val_loss: 0.5039 - val_acc: 0.8080
Epoch 32/300
380/380 [==============================] - 0s - loss: 0.4656 - acc: 0.8158 - val_loss: 0.4983 - val_acc: 0.8153
Epoch 33/300
380/380 [==============================] - 0s - loss: 0.4449 - acc: 0.8395 - val_loss: 0.4930 - val_acc: 0.8231
Epoch 34/300
380/380 [==============================] - 0s - loss: 0.4427 - acc: 0.8263 - val_loss: 0.4880 - val_acc: 0.8284
Epoch 35/300
380/380 [==============================] - 0s - loss: 0.4299 - acc: 0.8368 - val_loss: 0.4829 - val_acc: 0.8330
Epoch 36/300
380/380 [==============================] - 0s - loss: 0.4381 - acc: 0.8342 - val_loss: 0.4774 - val_acc: 0.8376
Epoch 37/300
380/380 [==============================] - 0s - loss: 0.4275 - acc: 0.8421 - val_loss: 0.4714 - val_acc: 0.8422
Epoch 38/300
380/380 [==============================] - 0s - loss: 0.4254 - acc: 0.8395 - val_loss: 0.4648 - val_acc: 0.8448
Epoch 39/300
380/380 [==============================] - 0s - loss: 0.4288 - acc: 0.8289 - val_loss: 0.4578 - val_acc: 0.8508
Epoch 40/300
380/380 [==============================] - 0s - loss: 0.4163 - acc: 0.8368 - val_loss: 0.4505 - val_acc: 0.8514
Epoch 41/300
380/380 [==============================] - 0s - loss: 0.3937 - acc: 0.8737 - val_loss: 0.4429 - val_acc: 0.8560
Epoch 42/300
380/380 [==============================] - 0s - loss: 0.3928 - acc: 0.8526 - val_loss: 0.4346 - val_acc: 0.8639
Epoch 43/300
380/380 [==============================] - 0s - loss: 0.3960 - acc: 0.8711 - val_loss: 0.4260 - val_acc: 0.8659
Epoch 44/300
380/380 [==============================] - 0s - loss: 0.3855 - acc: 0.8763 - val_loss: 0.4176 - val_acc: 0.8652
Epoch 45/300
380/380 [==============================] - 0s - loss: 0.3610 - acc: 0.8789 - val_loss: 0.4093 - val_acc: 0.8665
Epoch 46/300
380/380 [==============================] - 0s - loss: 0.3639 - acc: 0.8842 - val_loss: 0.4010 - val_acc: 0.8685
Epoch 47/300
380/380 [==============================] - 0s - loss: 0.3584 - acc: 0.8868 - val_loss: 0.3923 - val_acc: 0.8705
Epoch 48/300
380/380 [==============================] - 0s - loss: 0.3557 - acc: 0.8947 - val_loss: 0.3835 - val_acc: 0.8757
Epoch 49/300
380/380 [==============================] - 0s - loss: 0.3446 - acc: 0.8974 - val_loss: 0.3754 - val_acc: 0.8784
Epoch 50/300
380/380 [==============================] - 0s - loss: 0.3329 - acc: 0.8895 - val_loss: 0.3678 - val_acc: 0.8803
Epoch 51/300
380/380 [==============================] - 0s - loss: 0.3372 - acc: 0.9053 - val_loss: 0.3603 - val_acc: 0.8810
Epoch 52/300
380/380 [==============================] - 0s - loss: 0.3125 - acc: 0.8974 - val_loss: 0.3533 - val_acc: 0.8817
Epoch 53/300
380/380 [==============================] - 0s - loss: 0.3072 - acc: 0.9026 - val_loss: 0.3469 - val_acc: 0.8823
Epoch 54/300
380/380 [==============================] - 0s - loss: 0.3147 - acc: 0.8947 - val_loss: 0.3405 - val_acc: 0.8849
Epoch 55/300
380/380 [==============================] - 0s - loss: 0.3119 - acc: 0.9026 - val_loss: 0.3341 - val_acc: 0.8856
Epoch 56/300
380/380 [==============================] - 0s - loss: 0.2915 - acc: 0.9105 - val_loss: 0.3277 - val_acc: 0.8876
Epoch 57/300
380/380 [==============================] - 0s - loss: 0.2931 - acc: 0.9132 - val_loss: 0.3215 - val_acc: 0.8895
Epoch 58/300
380/380 [==============================] - 0s - loss: 0.2776 - acc: 0.9132 - val_loss: 0.3157 - val_acc: 0.8902
Epoch 59/300
380/380 [==============================] - 0s - loss: 0.2799 - acc: 0.9184 - val_loss: 0.3101 - val_acc: 0.8909
Epoch 60/300
380/380 [==============================] - 0s - loss: 0.2760 - acc: 0.9132 - val_loss: 0.3045 - val_acc: 0.8922
Epoch 61/300
380/380 [==============================] - 0s - loss: 0.2671 - acc: 0.9079 - val_loss: 0.2989 - val_acc: 0.8948
Epoch 62/300
380/380 [==============================] - 0s - loss: 0.2569 - acc: 0.9211 - val_loss: 0.2934 - val_acc: 0.8941
Epoch 63/300
380/380 [==============================] - 0s - loss: 0.2524 - acc: 0.9184 - val_loss: 0.2880 - val_acc: 0.8935
Epoch 64/300
380/380 [==============================] - 0s - loss: 0.2559 - acc: 0.9132 - val_loss: 0.2828 - val_acc: 0.8948
Epoch 65/300
380/380 [==============================] - 0s - loss: 0.2503 - acc: 0.9079 - val_loss: 0.2772 - val_acc: 0.8961
Epoch 66/300
380/380 [==============================] - 0s - loss: 0.2371 - acc: 0.9211 - val_loss: 0.2713 - val_acc: 0.8961
Epoch 67/300
380/380 [==============================] - 0s - loss: 0.2328 - acc: 0.9211 - val_loss: 0.2654 - val_acc: 0.8961
Epoch 68/300
380/380 [==============================] - 0s - loss: 0.2297 - acc: 0.9184 - val_loss: 0.2587 - val_acc: 0.8968
Epoch 69/300
380/380 [==============================] - 0s - loss: 0.2179 - acc: 0.9263 - val_loss: 0.2522 - val_acc: 0.8981
Epoch 70/300
380/380 [==============================] - 0s - loss: 0.2239 - acc: 0.9263 - val_loss: 0.2450 - val_acc: 0.9007
Epoch 71/300
380/380 [==============================] - 0s - loss: 0.2031 - acc: 0.9316 - val_loss: 0.2381 - val_acc: 0.9027
Epoch 72/300
380/380 [==============================] - 0s - loss: 0.2029 - acc: 0.9289 - val_loss: 0.2318 - val_acc: 0.9053
Epoch 73/300
380/380 [==============================] - 0s - loss: 0.2010 - acc: 0.9289 - val_loss: 0.2261 - val_acc: 0.9060
Epoch 74/300
380/380 [==============================] - 0s - loss: 0.1816 - acc: 0.9395 - val_loss: 0.2220 - val_acc: 0.9086
Epoch 75/300
380/380 [==============================] - 0s - loss: 0.1991 - acc: 0.9289 - val_loss: 0.2177 - val_acc: 0.9119
Epoch 76/300
380/380 [==============================] - 0s - loss: 0.1794 - acc: 0.9447 - val_loss: 0.2125 - val_acc: 0.9126
Epoch 77/300
380/380 [==============================] - 0s - loss: 0.1860 - acc: 0.9395 - val_loss: 0.2062 - val_acc: 0.9139
Epoch 78/300
380/380 [==============================] - 0s - loss: 0.1866 - acc: 0.9368 - val_loss: 0.1991 - val_acc: 0.9178
Epoch 79/300
380/380 [==============================] - 0s - loss: 0.1725 - acc: 0.9421 - val_loss: 0.1923 - val_acc: 0.9198
Epoch 80/300
380/380 [==============================] - 0s - loss: 0.1756 - acc: 0.9368 - val_loss: 0.1874 - val_acc: 0.9231
Epoch 81/300
380/380 [==============================] - 0s - loss: 0.1706 - acc: 0.9421 - val_loss: 0.1841 - val_acc: 0.9250
Epoch 82/300
380/380 [==============================] - 0s - loss: 0.1497 - acc: 0.9553 - val_loss: 0.1820 - val_acc: 0.9270
Epoch 83/300
380/380 [==============================] - 0s - loss: 0.1359 - acc: 0.9526 - val_loss: 0.1796 - val_acc: 0.9264
Epoch 84/300
380/380 [==============================] - 0s - loss: 0.1499 - acc: 0.9553 - val_loss: 0.1723 - val_acc: 0.9283
Epoch 85/300
380/380 [==============================] - 0s - loss: 0.1434 - acc: 0.9526 - val_loss: 0.1653 - val_acc: 0.9329
Epoch 86/300
380/380 [==============================] - 0s - loss: 0.1363 - acc: 0.9526 - val_loss: 0.1610 - val_acc: 0.9362
Epoch 87/300
380/380 [==============================] - 0s - loss: 0.1437 - acc: 0.9526 - val_loss: 0.1577 - val_acc: 0.9375
Epoch 88/300
380/380 [==============================] - 0s - loss: 0.1377 - acc: 0.9658 - val_loss: 0.1557 - val_acc: 0.9382
Epoch 89/300
380/380 [==============================] - 0s - loss: 0.1309 - acc: 0.9526 - val_loss: 0.1544 - val_acc: 0.9382
Epoch 90/300
380/380 [==============================] - 0s - loss: 0.1419 - acc: 0.9526 - val_loss: 0.1507 - val_acc: 0.9408
Epoch 91/300
380/380 [==============================] - 0s - loss: 0.1278 - acc: 0.9500 - val_loss: 0.1464 - val_acc: 0.9428
Epoch 92/300
380/380 [==============================] - 0s - loss: 0.1200 - acc: 0.9658 - val_loss: 0.1428 - val_acc: 0.9448
Epoch 93/300
380/380 [==============================] - 0s - loss: 0.1425 - acc: 0.9526 - val_loss: 0.1394 - val_acc: 0.9481
Epoch 94/300
380/380 [==============================] - 0s - loss: 0.1138 - acc: 0.9632 - val_loss: 0.1373 - val_acc: 0.9494
Epoch 95/300
380/380 [==============================] - 0s - loss: 0.1278 - acc: 0.9605 - val_loss: 0.1348 - val_acc: 0.9513
Epoch 96/300
380/380 [==============================] - 0s - loss: 0.1247 - acc: 0.9605 - val_loss: 0.1314 - val_acc: 0.9553
Epoch 97/300
380/380 [==============================] - 0s - loss: 0.1314 - acc: 0.9553 - val_loss: 0.1288 - val_acc: 0.9546
Epoch 98/300
380/380 [==============================] - 0s - loss: 0.1162 - acc: 0.9553 - val_loss: 0.1272 - val_acc: 0.9560
Epoch 99/300
380/380 [==============================] - 0s - loss: 0.1176 - acc: 0.9711 - val_loss: 0.1283 - val_acc: 0.9533
Epoch 100/300
380/380 [==============================] - 0s - loss: 0.1215 - acc: 0.9500 - val_loss: 0.1274 - val_acc: 0.9540
Epoch 101/300
380/380 [==============================] - 0s - loss: 0.1104 - acc: 0.9684 - val_loss: 0.1239 - val_acc: 0.9546
Epoch 102/300
380/380 [==============================] - 0s - loss: 0.1076 - acc: 0.9632 - val_loss: 0.1208 - val_acc: 0.9573
Epoch 103/300
380/380 [==============================] - 0s - loss: 0.1127 - acc: 0.9605 - val_loss: 0.1182 - val_acc: 0.9599
Epoch 104/300
380/380 [==============================] - 0s - loss: 0.1036 - acc: 0.9658 - val_loss: 0.1166 - val_acc: 0.9599
Epoch 105/300
380/380 [==============================] - 0s - loss: 0.0997 - acc: 0.9684 - val_loss: 0.1161 - val_acc: 0.9612
Epoch 106/300
380/380 [==============================] - 0s - loss: 0.1046 - acc: 0.9684 - val_loss: 0.1170 - val_acc: 0.9579
Epoch 107/300
380/380 [==============================] - 0s - loss: 0.1041 - acc: 0.9658 - val_loss: 0.1161 - val_acc: 0.9579
Epoch 108/300
380/380 [==============================] - 0s - loss: 0.0894 - acc: 0.9737 - val_loss: 0.1128 - val_acc: 0.9619
Epoch 109/300
380/380 [==============================] - 0s - loss: 0.0906 - acc: 0.9789 - val_loss: 0.1099 - val_acc: 0.9619
Epoch 110/300
380/380 [==============================] - 0s - loss: 0.0909 - acc: 0.9789 - val_loss: 0.1083 - val_acc: 0.9632
Epoch 111/300
380/380 [==============================] - 0s - loss: 0.0954 - acc: 0.9711 - val_loss: 0.1070 - val_acc: 0.9652
Epoch 112/300
380/380 [==============================] - 0s - loss: 0.1043 - acc: 0.9711 - val_loss: 0.1060 - val_acc: 0.9652
Epoch 113/300
380/380 [==============================] - 0s - loss: 0.0936 - acc: 0.9763 - val_loss: 0.1054 - val_acc: 0.9652
Epoch 114/300
380/380 [==============================] - 0s - loss: 0.0902 - acc: 0.9763 - val_loss: 0.1051 - val_acc: 0.9632
Epoch 115/300
380/380 [==============================] - 0s - loss: 0.0863 - acc: 0.9763 - val_loss: 0.1057 - val_acc: 0.9632
Epoch 116/300
380/380 [==============================] - 0s - loss: 0.0807 - acc: 0.9789 - val_loss: 0.1059 - val_acc: 0.9625
Epoch 117/300
380/380 [==============================] - 0s - loss: 0.0992 - acc: 0.9605 - val_loss: 0.1043 - val_acc: 0.9625
Epoch 118/300
380/380 [==============================] - 0s - loss: 0.0852 - acc: 0.9711 - val_loss: 0.1024 - val_acc: 0.9625
Epoch 119/300
380/380 [==============================] - 0s - loss: 0.0881 - acc: 0.9737 - val_loss: 0.1013 - val_acc: 0.9645
Epoch 120/300
380/380 [==============================] - 0s - loss: 0.1001 - acc: 0.9763 - val_loss: 0.1005 - val_acc: 0.9658
Epoch 121/300
380/380 [==============================] - 0s - loss: 0.0901 - acc: 0.9763 - val_loss: 0.1000 - val_acc: 0.9638
Epoch 122/300
380/380 [==============================] - 0s - loss: 0.0877 - acc: 0.9711 - val_loss: 0.1001 - val_acc: 0.9638
Epoch 123/300
380/380 [==============================] - 0s - loss: 0.0795 - acc: 0.9711 - val_loss: 0.1003 - val_acc: 0.9625
Epoch 124/300
380/380 [==============================] - 0s - loss: 0.0947 - acc: 0.9737 - val_loss: 0.0997 - val_acc: 0.9632
Epoch 125/300
380/380 [==============================] - 0s - loss: 0.0868 - acc: 0.9763 - val_loss: 0.0983 - val_acc: 0.9645
Epoch 126/300
380/380 [==============================] - 0s - loss: 0.0762 - acc: 0.9763 - val_loss: 0.0977 - val_acc: 0.9645
Epoch 127/300
380/380 [==============================] - 0s - loss: 0.0901 - acc: 0.9711 - val_loss: 0.0975 - val_acc: 0.9652
Epoch 128/300
380/380 [==============================] - 0s - loss: 0.0721 - acc: 0.9789 - val_loss: 0.0975 - val_acc: 0.9632
Epoch 129/300
380/380 [==============================] - 0s - loss: 0.0775 - acc: 0.9789 - val_loss: 0.0992 - val_acc: 0.9632
Epoch 130/300
380/380 [==============================] - 0s - loss: 0.0771 - acc: 0.9684 - val_loss: 0.1012 - val_acc: 0.9638
Epoch 131/300
380/380 [==============================] - 0s - loss: 0.0965 - acc: 0.9658 - val_loss: 0.0994 - val_acc: 0.9638
Epoch 132/300
380/380 [==============================] - 0s - loss: 0.0821 - acc: 0.9763 - val_loss: 0.0971 - val_acc: 0.9638
Epoch 133/300
380/380 [==============================] - 0s - loss: 0.0912 - acc: 0.9711 - val_loss: 0.0960 - val_acc: 0.9645
Epoch 134/300
380/380 [==============================] - 0s - loss: 0.0789 - acc: 0.9842 - val_loss: 0.0956 - val_acc: 0.9671
Epoch 135/300
380/380 [==============================] - 0s - loss: 0.0783 - acc: 0.9816 - val_loss: 0.0955 - val_acc: 0.9671
Epoch 136/300
380/380 [==============================] - 0s - loss: 0.0781 - acc: 0.9737 - val_loss: 0.0955 - val_acc: 0.9658
Epoch 137/300
380/380 [==============================] - 0s - loss: 0.0748 - acc: 0.9737 - val_loss: 0.0964 - val_acc: 0.9632
Epoch 138/300
380/380 [==============================] - 0s - loss: 0.0837 - acc: 0.9711 - val_loss: 0.0972 - val_acc: 0.9638
Epoch 139/300
380/380 [==============================] - 0s - loss: 0.0731 - acc: 0.9789 - val_loss: 0.0963 - val_acc: 0.9632
Epoch 140/300
380/380 [==============================] - 0s - loss: 0.0702 - acc: 0.9816 - val_loss: 0.0951 - val_acc: 0.9652
Epoch 141/300
380/380 [==============================] - 0s - loss: 0.0634 - acc: 0.9816 - val_loss: 0.0949 - val_acc: 0.9652
Epoch 142/300
380/380 [==============================] - 0s - loss: 0.0702 - acc: 0.9816 - val_loss: 0.0956 - val_acc: 0.9638
Epoch 143/300
380/380 [==============================] - 0s - loss: 0.0752 - acc: 0.9763 - val_loss: 0.0972 - val_acc: 0.9625
Epoch 144/300
380/380 [==============================] - 0s - loss: 0.0800 - acc: 0.9711 - val_loss: 0.0990 - val_acc: 0.9632
Epoch 145/300
380/380 [==============================] - 0s - loss: 0.0726 - acc: 0.9737 - val_loss: 0.0966 - val_acc: 0.9625
Epoch 146/300
380/380 [==============================] - 0s - loss: 0.0691 - acc: 0.9816 - val_loss: 0.0942 - val_acc: 0.9652
Epoch 147/300
380/380 [==============================] - 0s - loss: 0.0760 - acc: 0.9789 - val_loss: 0.0939 - val_acc: 0.9658
Epoch 148/300
380/380 [==============================] - 0s - loss: 0.0701 - acc: 0.9789 - val_loss: 0.0947 - val_acc: 0.9684
Epoch 149/300
380/380 [==============================] - 0s - loss: 0.0731 - acc: 0.9789 - val_loss: 0.0937 - val_acc: 0.9658
Epoch 150/300
380/380 [==============================] - 0s - loss: 0.0616 - acc: 0.9763 - val_loss: 0.0946 - val_acc: 0.9645
Epoch 151/300
380/380 [==============================] - 0s - loss: 0.0647 - acc: 0.9763 - val_loss: 0.0994 - val_acc: 0.9632
Epoch 152/300
380/380 [==============================] - 0s - loss: 0.0778 - acc: 0.9763 - val_loss: 0.1020 - val_acc: 0.9638
Epoch 153/300
380/380 [==============================] - 0s - loss: 0.0786 - acc: 0.9737 - val_loss: 0.0981 - val_acc: 0.9632
Epoch 154/300
380/380 [==============================] - 0s - loss: 0.0637 - acc: 0.9816 - val_loss: 0.0939 - val_acc: 0.9645
Epoch 155/300
380/380 [==============================] - 0s - loss: 0.0599 - acc: 0.9816 - val_loss: 0.0933 - val_acc: 0.9658
Epoch 156/300
380/380 [==============================] - 0s - loss: 0.0663 - acc: 0.9816 - val_loss: 0.0931 - val_acc: 0.9652
Epoch 157/300
380/380 [==============================] - 0s - loss: 0.0668 - acc: 0.9763 - val_loss: 0.0939 - val_acc: 0.9645
Epoch 158/300
380/380 [==============================] - 0s - loss: 0.0624 - acc: 0.9868 - val_loss: 0.0976 - val_acc: 0.9619
Epoch 159/300
380/380 [==============================] - 0s - loss: 0.0652 - acc: 0.9763 - val_loss: 0.1000 - val_acc: 0.9632
Epoch 160/300
380/380 [==============================] - 0s - loss: 0.0763 - acc: 0.9763 - val_loss: 0.0992 - val_acc: 0.9625
Epoch 161/300
380/380 [==============================] - 0s - loss: 0.0691 - acc: 0.9737 - val_loss: 0.0953 - val_acc: 0.9632
Epoch 162/300
380/380 [==============================] - 0s - loss: 0.0632 - acc: 0.9763 - val_loss: 0.0932 - val_acc: 0.9645
Epoch 163/300
380/380 [==============================] - 0s - loss: 0.0689 - acc: 0.9789 - val_loss: 0.0927 - val_acc: 0.9652
Epoch 164/300
380/380 [==============================] - 0s - loss: 0.0646 - acc: 0.9816 - val_loss: 0.0929 - val_acc: 0.9645
Epoch 165/300
380/380 [==============================] - 0s - loss: 0.0652 - acc: 0.9816 - val_loss: 0.0933 - val_acc: 0.9638
Epoch 166/300
380/380 [==============================] - 0s - loss: 0.0804 - acc: 0.9763 - val_loss: 0.0939 - val_acc: 0.9645
Epoch 167/300
380/380 [==============================] - 0s - loss: 0.0665 - acc: 0.9737 - val_loss: 0.0946 - val_acc: 0.9632
Epoch 168/300
380/380 [==============================] - 0s - loss: 0.0660 - acc: 0.9816 - val_loss: 0.0951 - val_acc: 0.9632
Epoch 169/300
380/380 [==============================] - 0s - loss: 0.0701 - acc: 0.9737 - val_loss: 0.0963 - val_acc: 0.9625
Epoch 170/300
380/380 [==============================] - 0s - loss: 0.0596 - acc: 0.9763 - val_loss: 0.0966 - val_acc: 0.9625
Epoch 171/300
380/380 [==============================] - 0s - loss: 0.0639 - acc: 0.9737 - val_loss: 0.0942 - val_acc: 0.9638
Epoch 172/300
380/380 [==============================] - 0s - loss: 0.0567 - acc: 0.9868 - val_loss: 0.0928 - val_acc: 0.9625
Epoch 173/300
380/380 [==============================] - 0s - loss: 0.0721 - acc: 0.9763 - val_loss: 0.0925 - val_acc: 0.9632
Epoch 174/300
380/380 [==============================] - 0s - loss: 0.0606 - acc: 0.9816 - val_loss: 0.0927 - val_acc: 0.9632
Epoch 175/300
380/380 [==============================] - 0s - loss: 0.0616 - acc: 0.9842 - val_loss: 0.0930 - val_acc: 0.9625
Epoch 176/300
380/380 [==============================] - 0s - loss: 0.0716 - acc: 0.9789 - val_loss: 0.0944 - val_acc: 0.9632
Epoch 177/300
380/380 [==============================] - 0s - loss: 0.0644 - acc: 0.9763 - val_loss: 0.0964 - val_acc: 0.9632
Epoch 178/300
380/380 [==============================] - 0s - loss: 0.0619 - acc: 0.9763 - val_loss: 0.0990 - val_acc: 0.9625
Epoch 179/300
380/380 [==============================] - 0s - loss: 0.0609 - acc: 0.9737 - val_loss: 0.0977 - val_acc: 0.9638
Epoch 180/300
380/380 [==============================] - 0s - loss: 0.0771 - acc: 0.9789 - val_loss: 0.0953 - val_acc: 0.9619
Epoch 181/300
380/380 [==============================] - 0s - loss: 0.0596 - acc: 0.9816 - val_loss: 0.0941 - val_acc: 0.9632
Epoch 182/300
380/380 [==============================] - 0s - loss: 0.0611 - acc: 0.9816 - val_loss: 0.0937 - val_acc: 0.9645
Epoch 183/300
380/380 [==============================] - 0s - loss: 0.0569 - acc: 0.9816 - val_loss: 0.0940 - val_acc: 0.9645
Epoch 184/300
380/380 [==============================] - 0s - loss: 0.0556 - acc: 0.9789 - val_loss: 0.0949 - val_acc: 0.9632
Epoch 185/300
380/380 [==============================] - 0s - loss: 0.0553 - acc: 0.9842 - val_loss: 0.0970 - val_acc: 0.9625
Epoch 186/300
380/380 [==============================] - 0s - loss: 0.0533 - acc: 0.9737 - val_loss: 0.0975 - val_acc: 0.9625
Epoch 187/300
380/380 [==============================] - 0s - loss: 0.0665 - acc: 0.9816 - val_loss: 0.0955 - val_acc: 0.9632
Epoch 188/300
380/380 [==============================] - 0s - loss: 0.0695 - acc: 0.9737 - val_loss: 0.0943 - val_acc: 0.9632
Epoch 189/300
380/380 [==============================] - 0s - loss: 0.0630 - acc: 0.9763 - val_loss: 0.0934 - val_acc: 0.9638
Epoch 190/300
380/380 [==============================] - 0s - loss: 0.0637 - acc: 0.9711 - val_loss: 0.0933 - val_acc: 0.9638
Epoch 191/300
380/380 [==============================] - 0s - loss: 0.0513 - acc: 0.9816 - val_loss: 0.0937 - val_acc: 0.9625
Epoch 192/300
380/380 [==============================] - 0s - loss: 0.0559 - acc: 0.9842 - val_loss: 0.0940 - val_acc: 0.9619
Epoch 193/300
380/380 [==============================] - 0s - loss: 0.0674 - acc: 0.9763 - val_loss: 0.0943 - val_acc: 0.9625
Epoch 194/300
380/380 [==============================] - 0s - loss: 0.0606 - acc: 0.9816 - val_loss: 0.0943 - val_acc: 0.9625
Epoch 195/300
380/380 [==============================] - 0s - loss: 0.0599 - acc: 0.9737 - val_loss: 0.0936 - val_acc: 0.9625
Epoch 196/300
380/380 [==============================] - 0s - loss: 0.0568 - acc: 0.9763 - val_loss: 0.0940 - val_acc: 0.9625
Epoch 197/300
380/380 [==============================] - 0s - loss: 0.0595 - acc: 0.9763 - val_loss: 0.0941 - val_acc: 0.9625
Epoch 198/300
380/380 [==============================] - 0s - loss: 0.0645 - acc: 0.9789 - val_loss: 0.0948 - val_acc: 0.9632
Epoch 199/300
380/380 [==============================] - 0s - loss: 0.0633 - acc: 0.9737 - val_loss: 0.0962 - val_acc: 0.9619
Epoch 200/300
380/380 [==============================] - 0s - loss: 0.0509 - acc: 0.9842 - val_loss: 0.0983 - val_acc: 0.9625
Epoch 201/300
380/380 [==============================] - 0s - loss: 0.0640 - acc: 0.9816 - val_loss: 0.0978 - val_acc: 0.9612
Epoch 202/300
380/380 [==============================] - 0s - loss: 0.0604 - acc: 0.9868 - val_loss: 0.0962 - val_acc: 0.9619
Epoch 203/300
380/380 [==============================] - 0s - loss: 0.0633 - acc: 0.9684 - val_loss: 0.0952 - val_acc: 0.9638
Epoch 204/300
380/380 [==============================] - 0s - loss: 0.0587 - acc: 0.9789 - val_loss: 0.0951 - val_acc: 0.9638
Epoch 205/300
380/380 [==============================] - 0s - loss: 0.0607 - acc: 0.9737 - val_loss: 0.0955 - val_acc: 0.9638
Epoch 206/300
380/380 [==============================] - 0s - loss: 0.0587 - acc: 0.9789 - val_loss: 0.0961 - val_acc: 0.9632
Epoch 207/300
380/380 [==============================] - 0s - loss: 0.0597 - acc: 0.9789 - val_loss: 0.0971 - val_acc: 0.9612
Epoch 208/300
380/380 [==============================] - 0s - loss: 0.0630 - acc: 0.9763 - val_loss: 0.0988 - val_acc: 0.9612
Epoch 209/300
380/380 [==============================] - 0s - loss: 0.0601 - acc: 0.9763 - val_loss: 0.0997 - val_acc: 0.9619
Epoch 210/300
380/380 [==============================] - 0s - loss: 0.0527 - acc: 0.9789 - val_loss: 0.0997 - val_acc: 0.9619
Epoch 211/300
380/380 [==============================] - 0s - loss: 0.0473 - acc: 0.9842 - val_loss: 0.0979 - val_acc: 0.9606
Epoch 212/300
380/380 [==============================] - 0s - loss: 0.0510 - acc: 0.9763 - val_loss: 0.0960 - val_acc: 0.9625
Epoch 213/300
380/380 [==============================] - 0s - loss: 0.0660 - acc: 0.9711 - val_loss: 0.0955 - val_acc: 0.9632
Epoch 214/300
380/380 [==============================] - 0s - loss: 0.0623 - acc: 0.9789 - val_loss: 0.0953 - val_acc: 0.9632
Epoch 215/300
380/380 [==============================] - 0s - loss: 0.0511 - acc: 0.9842 - val_loss: 0.0962 - val_acc: 0.9625
Epoch 216/300
380/380 [==============================] - 0s - loss: 0.0497 - acc: 0.9842 - val_loss: 0.0982 - val_acc: 0.9606
Epoch 217/300
380/380 [==============================] - 0s - loss: 0.0557 - acc: 0.9816 - val_loss: 0.1004 - val_acc: 0.9606
Epoch 218/300
380/380 [==============================] - 0s - loss: 0.0545 - acc: 0.9711 - val_loss: 0.1012 - val_acc: 0.9619
Epoch 219/300
380/380 [==============================] - 0s - loss: 0.0598 - acc: 0.9737 - val_loss: 0.1007 - val_acc: 0.9606
Epoch 220/300
380/380 [==============================] - 0s - loss: 0.0680 - acc: 0.9737 - val_loss: 0.0992 - val_acc: 0.9599
Epoch 221/300
380/380 [==============================] - 0s - loss: 0.0495 - acc: 0.9763 - val_loss: 0.0975 - val_acc: 0.9619
Epoch 222/300
380/380 [==============================] - 0s - loss: 0.0518 - acc: 0.9763 - val_loss: 0.0971 - val_acc: 0.9619
Epoch 223/300
380/380 [==============================] - 0s - loss: 0.0607 - acc: 0.9737 - val_loss: 0.0978 - val_acc: 0.9619
Epoch 224/300
380/380 [==============================] - 0s - loss: 0.0456 - acc: 0.9789 - val_loss: 0.0989 - val_acc: 0.9606
Epoch 225/300
380/380 [==============================] - 0s - loss: 0.0536 - acc: 0.9763 - val_loss: 0.0996 - val_acc: 0.9599
Epoch 226/300
380/380 [==============================] - 0s - loss: 0.0548 - acc: 0.9763 - val_loss: 0.0983 - val_acc: 0.9612
Epoch 227/300
380/380 [==============================] - 0s - loss: 0.0585 - acc: 0.9789 - val_loss: 0.0974 - val_acc: 0.9619
Epoch 228/300
380/380 [==============================] - 0s - loss: 0.0555 - acc: 0.9842 - val_loss: 0.0971 - val_acc: 0.9619
Epoch 229/300
380/380 [==============================] - 0s - loss: 0.0545 - acc: 0.9711 - val_loss: 0.0983 - val_acc: 0.9606
Epoch 230/300
380/380 [==============================] - 0s - loss: 0.0535 - acc: 0.9816 - val_loss: 0.1006 - val_acc: 0.9612
Epoch 231/300
380/380 [==============================] - 0s - loss: 0.0557 - acc: 0.9763 - val_loss: 0.1015 - val_acc: 0.9612
Epoch 232/300
380/380 [==============================] - 0s - loss: 0.0511 - acc: 0.9842 - val_loss: 0.1000 - val_acc: 0.9606
Epoch 233/300
380/380 [==============================] - 0s - loss: 0.0573 - acc: 0.9737 - val_loss: 0.0981 - val_acc: 0.9599
Epoch 234/300
380/380 [==============================] - 0s - loss: 0.0629 - acc: 0.9711 - val_loss: 0.0976 - val_acc: 0.9612
Epoch 235/300
380/380 [==============================] - 0s - loss: 0.0522 - acc: 0.9816 - val_loss: 0.0972 - val_acc: 0.9619
Epoch 236/300
380/380 [==============================] - 0s - loss: 0.0532 - acc: 0.9816 - val_loss: 0.0975 - val_acc: 0.9612
Epoch 237/300
380/380 [==============================] - 0s - loss: 0.0533 - acc: 0.9816 - val_loss: 0.0974 - val_acc: 0.9619
Epoch 238/300
380/380 [==============================] - 0s - loss: 0.0626 - acc: 0.9737 - val_loss: 0.0984 - val_acc: 0.9599
Epoch 239/300
380/380 [==============================] - 0s - loss: 0.0546 - acc: 0.9789 - val_loss: 0.0987 - val_acc: 0.9599
Epoch 240/300
380/380 [==============================] - 0s - loss: 0.0589 - acc: 0.9763 - val_loss: 0.0986 - val_acc: 0.9612
Epoch 241/300
380/380 [==============================] - 0s - loss: 0.0514 - acc: 0.9789 - val_loss: 0.0998 - val_acc: 0.9612
Epoch 242/300
380/380 [==============================] - 0s - loss: 0.0583 - acc: 0.9711 - val_loss: 0.1006 - val_acc: 0.9612
Epoch 243/300
380/380 [==============================] - 0s - loss: 0.0568 - acc: 0.9789 - val_loss: 0.1006 - val_acc: 0.9619
Epoch 244/300
380/380 [==============================] - 0s - loss: 0.0414 - acc: 0.9816 - val_loss: 0.1007 - val_acc: 0.9619
Epoch 245/300
380/380 [==============================] - 0s - loss: 0.0547 - acc: 0.9816 - val_loss: 0.1002 - val_acc: 0.9625
Epoch 246/300
380/380 [==============================] - 0s - loss: 0.0502 - acc: 0.9789 - val_loss: 0.0988 - val_acc: 0.9619
Epoch 247/300
380/380 [==============================] - 0s - loss: 0.0603 - acc: 0.9763 - val_loss: 0.0983 - val_acc: 0.9612
Epoch 248/300
380/380 [==============================] - 0s - loss: 0.0517 - acc: 0.9789 - val_loss: 0.0990 - val_acc: 0.9625
Epoch 249/300
380/380 [==============================] - 0s - loss: 0.0561 - acc: 0.9763 - val_loss: 0.1005 - val_acc: 0.9625
Epoch 250/300
380/380 [==============================] - 0s - loss: 0.0489 - acc: 0.9868 - val_loss: 0.1013 - val_acc: 0.9625
Epoch 251/300
380/380 [==============================] - 0s - loss: 0.0530 - acc: 0.9763 - val_loss: 0.1003 - val_acc: 0.9625
Epoch 252/300
380/380 [==============================] - 0s - loss: 0.0546 - acc: 0.9789 - val_loss: 0.0996 - val_acc: 0.9625
Epoch 253/300
380/380 [==============================] - 0s - loss: 0.0521 - acc: 0.9737 - val_loss: 0.0995 - val_acc: 0.9625
Epoch 254/300
380/380 [==============================] - 0s - loss: 0.0417 - acc: 0.9789 - val_loss: 0.1003 - val_acc: 0.9625
Epoch 255/300
380/380 [==============================] - 0s - loss: 0.0524 - acc: 0.9895 - val_loss: 0.1002 - val_acc: 0.9619
Epoch 256/300
380/380 [==============================] - 0s - loss: 0.0501 - acc: 0.9842 - val_loss: 0.1006 - val_acc: 0.9619
Epoch 257/300
380/380 [==============================] - 0s - loss: 0.0475 - acc: 0.9816 - val_loss: 0.1025 - val_acc: 0.9632
Epoch 258/300
380/380 [==============================] - 0s - loss: 0.0551 - acc: 0.9789 - val_loss: 0.1037 - val_acc: 0.9632
Epoch 259/300
380/380 [==============================] - 0s - loss: 0.0535 - acc: 0.9763 - val_loss: 0.1035 - val_acc: 0.9632
Epoch 260/300
380/380 [==============================] - 0s - loss: 0.0480 - acc: 0.9816 - val_loss: 0.1031 - val_acc: 0.9625
Epoch 261/300
380/380 [==============================] - 0s - loss: 0.0465 - acc: 0.9842 - val_loss: 0.1006 - val_acc: 0.9619
Epoch 262/300
380/380 [==============================] - 0s - loss: 0.0525 - acc: 0.9711 - val_loss: 0.0995 - val_acc: 0.9625
Epoch 263/300
380/380 [==============================] - 0s - loss: 0.0504 - acc: 0.9789 - val_loss: 0.0997 - val_acc: 0.9612
Epoch 264/300
380/380 [==============================] - 0s - loss: 0.0543 - acc: 0.9737 - val_loss: 0.1008 - val_acc: 0.9619
Epoch 265/300
380/380 [==============================] - 0s - loss: 0.0477 - acc: 0.9842 - val_loss: 0.1024 - val_acc: 0.9606
Epoch 266/300
380/380 [==============================] - 0s - loss: 0.0528 - acc: 0.9868 - val_loss: 0.1034 - val_acc: 0.9606
Epoch 267/300
380/380 [==============================] - 0s - loss: 0.0437 - acc: 0.9816 - val_loss: 0.1035 - val_acc: 0.9612
Epoch 268/300
380/380 [==============================] - 0s - loss: 0.0505 - acc: 0.9816 - val_loss: 0.1028 - val_acc: 0.9625
Epoch 269/300
380/380 [==============================] - 0s - loss: 0.0549 - acc: 0.9816 - val_loss: 0.1016 - val_acc: 0.9619
Epoch 270/300
380/380 [==============================] - 0s - loss: 0.0501 - acc: 0.9789 - val_loss: 0.1013 - val_acc: 0.9625
Epoch 271/300
380/380 [==============================] - 0s - loss: 0.0442 - acc: 0.9789 - val_loss: 0.1017 - val_acc: 0.9632
Epoch 272/300
380/380 [==============================] - 0s - loss: 0.0558 - acc: 0.9789 - val_loss: 0.1026 - val_acc: 0.9638
Epoch 273/300
380/380 [==============================] - 0s - loss: 0.0479 - acc: 0.9842 - val_loss: 0.1036 - val_acc: 0.9632
Epoch 274/300
380/380 [==============================] - 0s - loss: 0.0595 - acc: 0.9763 - val_loss: 0.1041 - val_acc: 0.9632
Epoch 275/300
380/380 [==============================] - 0s - loss: 0.0528 - acc: 0.9842 - val_loss: 0.1030 - val_acc: 0.9632
Epoch 276/300
380/380 [==============================] - 0s - loss: 0.0421 - acc: 0.9842 - val_loss: 0.1022 - val_acc: 0.9632
Epoch 277/300
380/380 [==============================] - 0s - loss: 0.0471 - acc: 0.9763 - val_loss: 0.1026 - val_acc: 0.9619
Epoch 278/300
380/380 [==============================] - 0s - loss: 0.0476 - acc: 0.9789 - val_loss: 0.1044 - val_acc: 0.9632
Epoch 279/300
380/380 [==============================] - 0s - loss: 0.0465 - acc: 0.9868 - val_loss: 0.1070 - val_acc: 0.9606
Epoch 280/300
380/380 [==============================] - 0s - loss: 0.0418 - acc: 0.9895 - val_loss: 0.1079 - val_acc: 0.9606
Epoch 281/300
380/380 [==============================] - 0s - loss: 0.0480 - acc: 0.9816 - val_loss: 0.1049 - val_acc: 0.9599
Epoch 282/300
380/380 [==============================] - 0s - loss: 0.0449 - acc: 0.9789 - val_loss: 0.1018 - val_acc: 0.9625
Epoch 283/300
380/380 [==============================] - 0s - loss: 0.0537 - acc: 0.9789 - val_loss: 0.1002 - val_acc: 0.9645
Epoch 284/300
380/380 [==============================] - 0s - loss: 0.0515 - acc: 0.9816 - val_loss: 0.0999 - val_acc: 0.9652
Epoch 285/300
380/380 [==============================] - 0s - loss: 0.0437 - acc: 0.9789 - val_loss: 0.1003 - val_acc: 0.9632
Epoch 286/300
380/380 [==============================] - 0s - loss: 0.0539 - acc: 0.9789 - val_loss: 0.1012 - val_acc: 0.9625
Epoch 287/300
380/380 [==============================] - 0s - loss: 0.0485 - acc: 0.9842 - val_loss: 0.1041 - val_acc: 0.9625
Epoch 288/300
380/380 [==============================] - 0s - loss: 0.0464 - acc: 0.9816 - val_loss: 0.1071 - val_acc: 0.9606
Epoch 289/300
380/380 [==============================] - 0s - loss: 0.0499 - acc: 0.9816 - val_loss: 0.1077 - val_acc: 0.9606
Epoch 290/300
380/380 [==============================] - 0s - loss: 0.0451 - acc: 0.9842 - val_loss: 0.1041 - val_acc: 0.9632
Epoch 291/300
380/380 [==============================] - 0s - loss: 0.0587 - acc: 0.9737 - val_loss: 0.1011 - val_acc: 0.9638
Epoch 292/300
380/380 [==============================] - 0s - loss: 0.0495 - acc: 0.9789 - val_loss: 0.1008 - val_acc: 0.9645
Epoch 293/300
380/380 [==============================] - 0s - loss: 0.0460 - acc: 0.9816 - val_loss: 0.1017 - val_acc: 0.9638
Epoch 294/300
380/380 [==============================] - 0s - loss: 0.0464 - acc: 0.9816 - val_loss: 0.1034 - val_acc: 0.9625
Epoch 295/300
380/380 [==============================] - 0s - loss: 0.0389 - acc: 0.9868 - val_loss: 0.1051 - val_acc: 0.9619
Epoch 296/300
380/380 [==============================] - 0s - loss: 0.0347 - acc: 0.9947 - val_loss: 0.1063 - val_acc: 0.9625
Epoch 297/300
380/380 [==============================] - 0s - loss: 0.0477 - acc: 0.9842 - val_loss: 0.1061 - val_acc: 0.9619
Epoch 298/300
380/380 [==============================] - 0s - loss: 0.0434 - acc: 0.9842 - val_loss: 0.1070 - val_acc: 0.9612
Epoch 299/300
380/380 [==============================] - 0s - loss: 0.0551 - acc: 0.9763 - val_loss: 0.1078 - val_acc: 0.9606
Epoch 300/300
380/380 [==============================] - 0s - loss: 0.0395 - acc: 0.9868 - val_loss: 0.1069 - val_acc: 0.9619

accuracy= 0.97000789266
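Note that validation_split=0.8 holds out 80% of the training rows for validation, which is why the log says "Train on 380 samples, validate on 1521 samples"; the validation loss also flattens out well before epoch 300. A sketch of the same fit call with early stopping (EarlyStopping is a standard Keras callback; everything else is unchanged):

from keras.callbacks import EarlyStopping

early_stop = EarlyStopping(monitor='val_loss', patience=20, verbose=1)
train_history = model.fit(x=train_feature_trans, y=train_label,
                          validation_split=0.8, epochs=300,
                          batch_size=2000, verbose=1,
                          callbacks=[early_stop])  # stop once val_loss stops improving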

In [20]:
prediction2 = prediction
prediction2_list = prediction2.reshape(-1).astype(int)
label2_list = test_label.astype(int)

print(classification_report(label2_list, prediction2_list))
print(confusion_matrix(label2_list, prediction2_list))


             precision    recall  f1-score   support

          0       0.97      0.97      0.97       624
          1       0.97      0.97      0.97       643

avg / total       0.97      0.97      0.97      1267

[[608  16]
 [ 22 621]]
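
As a quick sanity check, the test accuracy printed above (0.97000789266) follows directly from this confusion matrix: correct predictions are the diagonal counts out of all 1267 test samples. A minimal sketch using the counts shown:

cm = np.array([[608,  16],
               [ 22, 621]])
print(np.trace(cm) / cm.sum())   # 1229/1267 ≈ 0.970008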

In [23]:
conf = confusion_matrix(label2_list, prediction2_list)
f, ax= plt.subplots(figsize = (5, 5))
sns.heatmap(conf, annot=True, ax=ax, fmt='d') 
ax.xaxis.set_ticks_position('top') # put the x-axis tick labels on top, as is conventional for confusion matrices in textbooks
plt.show()



In [24]:
def model_efficacy(conf):
    # sklearn's confusion_matrix convention: conf[i][j] = samples with true label i predicted as j.
    # Class 0 (male) is treated as the positive class below (G = gold standard; P = prediction).
    total_num = np.sum(conf)
    sen = conf[0][0]/(conf[0][0]+conf[0][1])                  # sensitivity = TP / (TP + FN)
    spe = conf[1][1]/(conf[1][0]+conf[1][1])                  # specificity = TN / (FP + TN)
    false_positive_rate = conf[1][0]/(conf[1][0]+conf[1][1])  # FP / (FP + TN) = 1 - specificity
    false_negative_rate = conf[0][1]/(conf[0][0]+conf[0][1])  # FN / (TP + FN) = 1 - sensitivity
    
    print('total_num: ',total_num)
    print('G1P1: ',conf[0][0]) # true positives
    print('G1P0: ',conf[0][1]) # false negatives
    print('G0P1: ',conf[1][0]) # false positives
    print('G0P0: ',conf[1][1]) # true negatives
    print('##########################')
    print('sensitivity: ',sen)
    print('specificity: ',spe)
    print('false_positive_rate: ',false_positive_rate)
    print('false_negative_rate: ',false_negative_rate)
    
    return total_num, sen, spe, false_positive_rate, false_negative_rate

conf = confusion_matrix(label2_list, prediction2_list)
model_efficacy(conf)


total_num:  1267
G1P1:  608
G1P0:  16
G0P1:  22
G0P0:  621
##########################
sensitivity:  0.974358974359
specificity:  0.965785381026
false_positive_rate:  0.0342146189736
false_negative_rate:  0.025641025641
Out[24]:
(1267,
 0.97435897435897436,
 0.96578538102643852,
 0.034214618973561431,
 0.025641025641025641)
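
sklearn's own idiom unpacks the same four cells with class 1 (female) as the positive class, so sensitivity and specificity simply swap roles relative to the printout above. A minimal sketch reusing label2_list and prediction2_list:

# rows are true labels, columns are predictions; with labels [0, 1], ravel() yields tn, fp, fn, tp
tn, fp, fn, tp = confusion_matrix(label2_list, prediction2_list).ravel()
print('sensitivity (class 1 positive):', tp / (tp + fn))
print('specificity (class 1 positive):', tn / (tn + fp))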

In [27]:
false_positive_rate, true_positive_rate, thresholds = roc_curve(label2_list, prediction2_list)
roc_auc = auc(false_positive_rate, true_positive_rate)

plt.title('Receiver Operating Characteristic')
plt.plot(false_positive_rate, true_positive_rate, 'b', label='AUC = %0.2f'% roc_auc)
plt.legend(loc='lower right')
plt.plot([0,1],[0,1],'r--')
plt.xlim([-0.1,1.2])
plt.ylim([-0.1,1.2])
plt.ylabel('True Positive Rate')
plt.xlabel('False Positive Rate')
plt.show()
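
Note that prediction2_list holds hard 0/1 labels, so roc_curve sees only a single operating point and the curve degenerates to two straight segments. A smoother ROC comes from the network's continuous outputs; a minimal sketch, assuming the trained Keras model object is named model (a hypothetical name, not shown in this section) with a sigmoid output layer:

# hypothetical: 'model' is the trained Keras classifier evaluated above
proba = model.predict(test_feature_trans).reshape(-1)   # predicted class-1 probabilities
fpr, tpr, thresholds = roc_curve(label2_list, proba)
print('AUC from probabilities:', auc(fpr, tpr))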


Scikit-learn: RandomForestClassifier performs even better than the MLP


In [42]:
# Scikit-learn models
from sklearn import ensemble
from sklearn.model_selection import train_test_split  # sklearn.cross_validation was deprecated/removed

X_train, X_test, y_train, y_test = train_test_split(train_feature_trans, train_label,
                                                    test_size=0.25, random_state=0, stratify=train_label)  # stratified sampling
clf = ensemble.RandomForestClassifier()
clf.fit(X_train, y_train)
# note: the scores below are computed on the full (scaled) training set and the separate test set
print("Training Score:%f" % clf.score(train_feature_trans, train_label))
print("Testing Score:%f" % clf.score(test_feature_trans, test_label))


Training Score:0.993688
Testing Score:0.979479
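
A fitted random forest also exposes per-feature importances, which helps explain its edge over the MLP. A minimal sketch, assuming the columns of train_feature_trans keep the same order as the feature columns of df_full (every column except label):

# hypothetical inspection of the fitted forest; see the column-order assumption above
feature_names = [c for c in df_full.columns if c != 'label']
importances = pd.Series(clf.feature_importances_, index=feature_names)
print(importances.sort_values(ascending=False).head(10))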

In [43]:
prediction2 = clf.predict(test_feature_trans)
prediction2_list = prediction2.reshape(-1).astype(int)
label2_list = test_label.astype(int)

print(classification_report(label2_list, prediction2_list))
print(confusion_matrix(label2_list, prediction2_list))


             precision    recall  f1-score   support

          0       0.97      0.99      0.98       624
          1       0.99      0.97      0.98       643

avg / total       0.98      0.98      0.98      1267

[[616   8]
 [ 18 625]]

In [44]:
# conf heatmap
conf = confusion_matrix(label2_list, prediction2_list)
f, ax= plt.subplots(figsize = (5, 5))
sns.heatmap(conf, annot=True, ax=ax, fmt='d') 
ax.xaxis.set_ticks_position('top') # put the x-axis tick labels on top, as is conventional for confusion matrices in textbooks
plt.show()



In [45]:
# model_efficacy() is already defined in In [24] above; reuse it here

conf = confusion_matrix(label2_list, prediction2_list)
model_efficacy(conf)


total_num:  1267
G1P1:  616
G1P0:  8
G0P1:  18
G0P0:  625
##########################
sensitivity:  0.987179487179
specificity:  0.97200622084
false_positive_rate:  0.0279937791602
false_negative_rate:  0.0128205128205
Out[45]:
(1267,
 0.98717948717948718,
 0.97200622083981336,
 0.027993779160186625,
 0.012820512820512821)

In [46]:
# ROC curve
false_positive_rate, true_positive_rate, thresholds = roc_curve(label2_list, prediction2_list)
roc_auc = auc(false_positive_rate, true_positive_rate)

plt.title('Receiver Operating Characteristic')
plt.plot(false_positive_rate, true_positive_rate, 'b', label='AUC = %0.2f'% roc_auc)
plt.legend(loc='lower right')
plt.plot([0,1],[0,1],'r--')
plt.xlim([-0.1,1.2])
plt.ylim([-0.1,1.2])
plt.ylabel('True Positive Rate')
plt.xlabel('False Positive Rate')
plt.show()


PCA dimensionality-reduction test: performance degrades slightly


In [90]:
pca = PCA(n_components=10)
pca.fit(train_feature_trans)
pca_score = pca.explained_variance_ratio_
pca_score


Out[90]:
array([ 0.5096875 ,  0.11779022,  0.08727398,  0.0626329 ,  0.05316123,
        0.03776423,  0.0328923 ,  0.0252862 ,  0.02254116,  0.0172489 ])
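
The cumulative explained variance is a quick guide to how many components are worth keeping; a minimal sketch using the pca_score array above:

# cumulative share of variance captured by the first k principal components
print(np.cumsum(pca_score))   # the 10 components above cover roughly 96.6% of the variance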

In [91]:
train_feature_trans_PCA = pca.transform(train_feature_trans)
test_feature_trans_PCA = pca.transform(test_feature_trans)

In [92]:
# Scikit-learn models
from sklearn import ensemble
from sklearn.model_selection import train_test_split  # sklearn.cross_validation was deprecated/removed

X_train, X_test, y_train, y_test = train_test_split(train_feature_trans_PCA, train_label,
                                                    test_size=0.25, random_state=0, stratify=train_label)  # stratified sampling
clf = ensemble.RandomForestClassifier()
clf.fit(X_train, y_train)
print("Training Score:%f" % clf.score(train_feature_trans_PCA, train_label))
print("Testing Score:%f" % clf.score(test_feature_trans_PCA, test_label))


Training Score:0.988427
Testing Score:0.959747
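
To check that this drop is not an artifact of a single split, the original and PCA-reduced features can be compared with cross-validation; a minimal sketch using 5-fold accuracy:

from sklearn.model_selection import cross_val_score
from sklearn.ensemble import RandomForestClassifier

for name, X in [('original', train_feature_trans), ('PCA-10', train_feature_trans_PCA)]:
    scores = cross_val_score(RandomForestClassifier(random_state=0), X, train_label, cv=5)
    print(name, 'mean CV accuracy:', scores.mean())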

In [93]:
prediction2 = clf.predict(test_feature_trans_PCA)
prediction2_list = prediction2.reshape(-1).astype(int)
label2_list = test_label.astype(int)

print(classification_report(label2_list, prediction2_list))
print(confusion_matrix(label2_list, prediction2_list))


             precision    recall  f1-score   support

          0       0.94      0.98      0.96       624
          1       0.98      0.94      0.96       643

avg / total       0.96      0.96      0.96      1267

[[613  11]
 [ 40 603]]

In [96]:
# conf heatmap
conf = confusion_matrix(label2_list, prediction2_list)
f, ax= plt.subplots(figsize = (5, 5))
sns.heatmap(conf, annot=True, ax=ax, fmt='d') 
ax.xaxis.set_ticks_position('top') # put the x-axis tick labels on top, as is conventional for confusion matrices in textbooks
plt.show()



In [97]:
plt.scatter(train_feature_trans_PCA[:,0],train_feature_trans_PCA[:,1], alpha=0.3, 
            c=train_label , cmap=cm.jet, vmin=0., vmax=1.)
plt.show()



In [101]:
from mpl_toolkits.mplot3d import Axes3D
fig = plt.figure(figsize=(10,10))
ax = fig.add_subplot(111, projection='3d')

ax.scatter(train_feature_trans_PCA[:,0],train_feature_trans_PCA[:,1], train_feature_trans_PCA[:,2], alpha=0.3, 
               c=train_label , cmap=cm.jet, vmin=0., vmax=1.)
plt.show()
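
The projections above encode the class only through colour. Labelling the axes and classes makes the 2-D view easier to read; a minimal sketch, assuming train_label is a 0/1 array as used above:

# hypothetical relabelled 2-D PCA scatter (PC1 vs PC2), one colour per class
labels_flat = np.ravel(train_label)
for cls, name, color in [(0, 'male', 'steelblue'), (1, 'female', 'tomato')]:
    mask = labels_flat == cls
    plt.scatter(train_feature_trans_PCA[mask, 0], train_feature_trans_PCA[mask, 1],
                alpha=0.3, c=color, label=name)
plt.xlabel('PC1')
plt.ylabel('PC2')
plt.legend()
plt.show()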



In [ ]: