In [2]:
# import common APIs
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
%matplotlib inline
from mpl_toolkits.mplot3d import Axes3D
import seaborn as sns
import os
from sklearn.preprocessing import MinMaxScaler, StandardScaler, LabelEncoder, OneHotEncoder
from sklearn.decomposition import PCA
from sklearn import cross_validation, naive_bayes, tree, svm, ensemble # note: cross_validation is deprecated; newer scikit-learn uses sklearn.model_selection
from sklearn.metrics import classification_report,confusion_matrix,precision_recall_curve,auc,roc_auc_score,roc_curve
from xgboost import XGBClassifier

In [5]:
# Data observation
filepath = '/Users/mac/Desktop/Kaggle_datasets/Glass_classification/'
filename01 = 'glass.csv'

df = pd.read_csv(os.path.join(filepath, filename01))
df.head()


Out[5]:
RI Na Mg Al Si K Ca Ba Fe Type
0 1.52101 13.64 4.49 1.10 71.78 0.06 8.75 0.0 0.0 1
1 1.51761 13.89 3.60 1.36 72.73 0.48 7.83 0.0 0.0 1
2 1.51618 13.53 3.55 1.54 72.99 0.39 7.78 0.0 0.0 1
3 1.51766 13.21 3.69 1.29 72.61 0.57 8.22 0.0 0.0 1
4 1.51742 13.27 3.62 1.24 73.08 0.55 8.07 0.0 0.0 1

In [7]:
df.info()


<class 'pandas.core.frame.DataFrame'>
RangeIndex: 214 entries, 0 to 213
Data columns (total 10 columns):
RI      214 non-null float64
Na      214 non-null float64
Mg      214 non-null float64
Al      214 non-null float64
Si      214 non-null float64
K       214 non-null float64
Ca      214 non-null float64
Ba      214 non-null float64
Fe      214 non-null float64
Type    214 non-null int64
dtypes: float64(9), int64(1)
memory usage: 16.8 KB
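
The columns differ wildly in scale (RI sits near 1.52 while Si sits near 72), which is worth checking up front since the Keras MLP at the end depends on rescaling. A quick sketch:

df.describe()  # per-column count/mean/std/min/max; confirms the very different feature scales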

In [6]:
df.Type.value_counts() # note: there is no Type 4 at all


Out[6]:
2    76
1    70
7    29
3    17
5    13
6     9
Name: Type, dtype: int64

In [13]:
sns.countplot(x="Type", data=df, palette="Greens_d");



In [15]:
sns.barplot(x="Type", y="RI", data=df); #預設取y的平均值



In [16]:
sns.stripplot(x="Type", y="RI", data=df, jitter=True); #讓overlap的部分可以擺在橫向上



In [20]:
sns.barplot(x="Type", y="Na", data=df); #預設取y的平均值



In [19]:
sns.stripplot(x="Type", y="Na", data=df, jitter=True)


Out[19]:
<matplotlib.axes._subplots.AxesSubplot at 0x113d08128>

In [11]:
sns.regplot(x="Type", y="RI", data=df, x_jitter=.05);



In [12]:
sns.lmplot(x="Na", y="Mg", hue="Type", data=df)


Out[12]:
<seaborn.axisgrid.FacetGrid at 0x113839208>

In [41]:
T1 = df.loc[df.Type == 1]
T2 = df.loc[df.Type == 2]
T3 = df.loc[df.Type == 3]
T5 = df.loc[df.Type == 5]
T6 = df.loc[df.Type == 6]
T7 = df.loc[df.Type == 7]

In [63]:
### Separate colored contour density plots, one per glass type
f, ([ax1,ax2],[ax3,ax5]) = plt.subplots(2,2,figsize=(10, 10))
#cmap = sns.cubehelix_palette(as_cmap=True, dark=0, light=1, reverse=True)
ax1 = sns.kdeplot(T1.Na, T1.Mg, cmap='Reds', n_levels=60, shade=True, ax=ax1);
ax2 = sns.kdeplot(T2.Na, T2.Mg, cmap='Blues', n_levels=60, shade=True, ax=ax2);
ax3 = sns.kdeplot(T3.Na, T3.Mg, cmap='Greens', n_levels=60, shade=True, ax=ax3);
ax5 = sns.kdeplot(T5.Na, T5.Mg, cmap='Greys', n_levels=60, shade=True, ax=ax5);



In [64]:
### Contour plots stacked on one axis; the colors don't blend together, so it looks a bit ugly XD
f, ax = plt.subplots(figsize=(10, 10))
#cmap = sns.cubehelix_palette(as_cmap=True, dark=0, light=1, reverse=True)
ax = sns.kdeplot(T1.Na, T1.Mg, cmap='Reds', n_levels=60, shade_lowest=False, shade=True,);
ax = sns.kdeplot(T2.Na, T2.Mg, cmap='Blues', n_levels=60, shade_lowest=False, shade=True, );
ax = sns.kdeplot(T3.Na, T3.Mg, cmap='Greens', n_levels=60, shade_lowest=False, shade=True, );
ax = sns.kdeplot(T5.Na, T5.Mg, cmap='Greys', n_levels=60, shade_lowest=False, shade=True, );
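
Side note, an assumption about newer library versions: seaborn later deprecated the positional bivariate form used above, so on seaborn >= 0.12 each call would be written with keyword arguments instead, e.g.:

ax = sns.kdeplot(x=T1.Na, y=T1.Mg, cmap='Reds', levels=60, fill=True)  # seaborn >= 0.12 spelling (assumption)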



In [21]:
# Data preprocessing
from sklearn.utils import shuffle

shuffle_df = shuffle(df, random_state=42)

df_label = shuffle_df['Type']
df_feature = shuffle_df.drop('Type', axis=1)

cut_point = round(len(df)*0.6)
train_feature = np.array(df_feature.values[:cut_point,:])
train_label = np.array(df_label.values[:cut_point])
test_feature = np.array(df_feature.values[cut_point:,:])
test_label = np.array(df_label.values[cut_point:])
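
The manual 60/40 cut above works because the frame was shuffled first. A hedged alternative sketch (not the notebook's code): sklearn can do the same split with stratification, which matters for the rare classes 5 and 6; the variable names here are illustrative.

from sklearn.model_selection import train_test_split  # location in newer scikit-learn

X_tr, X_te, y_tr, y_te = train_test_split(
    df_feature.values, df_label.values,
    test_size=0.4, random_state=42, stratify=df_label.values)  # keeps class proportions in both halves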

Scikit-Learn: DecisionTreeClassifier and ensemble.RandomForestClassifier are still the strongest


In [74]:
### naive_bayes.GaussianNB()
from sklearn import cross_validation, naive_bayes
X_train,X_test,y_train,y_test = cross_validation.train_test_split(train_feature,train_label, 
                                              test_size=0.25, random_state=0,stratify=train_label)
clf=naive_bayes.GaussianNB()
clf.fit(X_train,y_train)
print("Traing Score:%f"%clf.score(train_feature,train_label))
print("Testing Score:%f"%clf.score(test_feature,test_label))

y_predict = clf.predict(X_test)
print('\n'+classification_report(y_test,y_predict))

y_predict2 = clf.predict(test_feature)
print('\n'+classification_report(test_label,y_predict2))


Training Score:0.375000
Testing Score:0.313953

             precision    recall  f1-score   support

          1       0.00      0.00      0.00        11
          2       0.00      0.00      0.00        12
          3       0.10      0.67      0.17         3
          5       1.00      1.00      1.00         2
          6       0.00      0.00      0.00         1
          7       1.00      1.00      1.00         3

avg / total       0.17      0.22      0.17        32


             precision    recall  f1-score   support

          1       0.00      0.00      0.00        28
          2       0.31      0.18      0.23        28
          3       0.11      0.83      0.20         6
          5       0.33      0.33      0.33         6
          6       0.60      1.00      0.75         3
          7       0.86      0.80      0.83        15

avg / total       0.30      0.31      0.28        86

//anaconda/lib/python3.5/site-packages/sklearn/metrics/classification.py:1135: UndefinedMetricWarning: Precision and F-score are ill-defined and being set to 0.0 in labels with no predicted samples.
  'precision', 'predicted', average, warn_for)
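
The UndefinedMetricWarning above appears because the model never predicts some classes, so their precision is undefined and silently set to 0. On newer scikit-learn (>= 0.22, an assumption about the installed version) this can be made explicit, a minimal sketch:

print(classification_report(test_label, y_predict2, zero_division=0))  # declares the 0.0 convention and silences the warning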

In [26]:
### naive_bayes.MultinomialNB()
from sklearn import cross_validation, naive_bayes
X_train,X_test,y_train,y_test = cross_validation.train_test_split(train_feature,train_label, 
                                              test_size=0.25, random_state=0,stratify=train_label)
clf=naive_bayes.MultinomialNB()
clf.fit(X_train,y_train)
print("Traing Score:%f"%clf.score(train_feature,train_label))
print("Testing Score:%f"%clf.score(test_feature,test_label))

y_predict = clf.predict(X_test)
print('\n'+classification_report(y_test,y_predict))

y_predict2 = clf.predict(test_feature)
print('\n'+classification_report(test_label,y_predict2))


Training Score:0.507812
Testing Score:0.430233

             precision    recall  f1-score   support

          1       0.00      0.00      0.00        11
          2       0.48      1.00      0.65        12
          3       0.00      0.00      0.00         3
          5       1.00      0.50      0.67         2
          6       0.00      0.00      0.00         1
          7       0.60      1.00      0.75         3

avg / total       0.30      0.50      0.36        32


             precision    recall  f1-score   support

          1       0.00      0.00      0.00        28
          2       0.37      0.89      0.53        28
          3       0.00      0.00      0.00         6
          5       0.25      0.17      0.20         6
          6       0.00      0.00      0.00         3
          7       0.79      0.73      0.76        15

avg / total       0.28      0.43      0.32        86

//anaconda/lib/python3.5/site-packages/sklearn/metrics/classification.py:1135: UndefinedMetricWarning: Precision and F-score are ill-defined and being set to 0.0 in labels with no predicted samples.
  'precision', 'predicted', average, warn_for)

In [75]:
# confusion matrix (clf is the most recently fitted model, here the GaussianNB from In [74])
prediction2 = clf.predict(test_feature)
prediction2_list = prediction2.reshape(-1).astype(int)
label2_list = test_label.astype(int)

print(classification_report(label2_list, prediction2_list))
print(confusion_matrix(label2_list, prediction2_list))

# conf heatmap
conf = confusion_matrix(label2_list, prediction2_list)
f, ax= plt.subplots(figsize = (7, 6))
sns.heatmap(conf, annot=True, ax=ax, fmt='d', annot_kws={'size':20}) 
ax.xaxis.set_ticks_position('top') # putting the x-axis labels on top is common in textbooks
plt.show()


             precision    recall  f1-score   support

          1       0.00      0.00      0.00        28
          2       0.31      0.18      0.23        28
          3       0.11      0.83      0.20         6
          5       0.33      0.33      0.33         6
          6       0.60      1.00      0.75         3
          7       0.86      0.80      0.83        15

avg / total       0.30      0.31      0.28        86

[[ 0  7 20  0  1  0]
 [ 1  5 18  3  0  1]
 [ 0  0  5  0  1  0]
 [ 0  3  0  2  0  1]
 [ 0  0  0  0  3  0]
 [ 0  1  1  1  0 12]]
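
With classes this imbalanced, the raw counts in the heatmap are hard to compare across rows. A small sketch that row-normalizes the matrix so each cell reads as per-class recall:

conf_norm = conf / conf.sum(axis=1, keepdims=True)  # each row now sums to 1
f, ax = plt.subplots(figsize=(7, 6))
sns.heatmap(conf_norm, annot=True, ax=ax, fmt='.2f', annot_kws={'size':14})
plt.show()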

In [65]:
### tree.DecisionTreeClassifier()
from sklearn import cross_validation,tree
X_train,X_test,y_train,y_test = cross_validation.train_test_split(train_feature,train_label, 
                                              test_size=0.25, random_state=0,stratify=train_label)
clf=tree.DecisionTreeClassifier()
clf.fit(X_train,y_train)
print("Traing Score:%f"%clf.score(train_feature,train_label))
print("Testing Score:%f"%clf.score(test_feature,test_label))

y_predict = clf.predict(X_test)
print('\n'+classification_report(y_test,y_predict))

y_predict2 = clf.predict(test_feature)
print('\n'+classification_report(test_label,y_predict2))


Training Score:0.890625
Testing Score:0.779070

             precision    recall  f1-score   support

          1       0.55      0.55      0.55        11
          2       0.55      0.50      0.52        12
          3       0.00      0.00      0.00         3
          5       1.00      1.00      1.00         2
          6       1.00      1.00      1.00         1
          7       1.00      1.00      1.00         3

avg / total       0.58      0.56      0.57        32


             precision    recall  f1-score   support

          1       0.71      0.79      0.75        28
          2       0.80      0.86      0.83        28
          3       0.67      0.67      0.67         6
          5       0.83      0.83      0.83         6
          6       0.67      0.67      0.67         3
          7       1.00      0.67      0.80        15

avg / total       0.79      0.78      0.78        86


In [73]:
# confusion matrix
prediction2 = clf.predict(test_feature)
prediction2_list = prediction2.reshape(-1).astype(int)
label2_list = test_label.astype(int)

print(classification_report(label2_list, prediction2_list))
print(confusion_matrix(label2_list, prediction2_list))

# conf heatmap
conf = confusion_matrix(label2_list, prediction2_list)
f, ax= plt.subplots(figsize = (7, 6))
sns.heatmap(conf, annot=True, ax=ax, fmt='d', annot_kws={'size':20}) 
ax.xaxis.set_ticks_position('top') # putting the x-axis labels on top is common in textbooks
plt.show()


             precision    recall  f1-score   support

          1       0.71      0.79      0.75        28
          2       0.80      0.86      0.83        28
          3       0.67      0.67      0.67         6
          5       0.83      0.83      0.83         6
          6       0.67      0.67      0.67         3
          7       1.00      0.67      0.80        15

avg / total       0.79      0.78      0.78        86

[[22  5  1  0  0  0]
 [ 3 24  1  0  0  0]
 [ 2  0  4  0  0  0]
 [ 1  0  0  5  0  0]
 [ 0  1  0  0  2  0]
 [ 3  0  0  1  1 10]]
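
Since the plain decision tree does this well, it is worth asking which features drive it. A sketch, assuming clf is still the DecisionTreeClassifier fitted in In [65]:

importances = pd.Series(clf.feature_importances_, index=df_feature.columns)
print(importances.sort_values(ascending=False))  # which oxides the tree splits on most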

In [29]:
### svm.LinearSVC()
from sklearn import cross_validation,svm
X_train,X_test,y_train,y_test = cross_validation.train_test_split(train_feature,train_label, 
                                              test_size=0.25, random_state=0,stratify=train_label)
clf=svm.LinearSVC()
clf.fit(X_train,y_train)
print("Traing Score:%f"%clf.score(train_feature,train_label))
print("Testing Score:%f"%clf.score(test_feature,test_label))

y_predict = clf.predict(X_test)
print('\n'+classification_report(y_test,y_predict))

y_predict2 = clf.predict(test_feature)
print('\n'+classification_report(test_label,y_predict2))


Training Score:0.476562
Testing Score:0.441860

             precision    recall  f1-score   support

          1       0.00      0.00      0.00        11
          2       0.50      1.00      0.67        12
          3       0.33      0.33      0.33         3
          5       0.67      1.00      0.80         2
          6       0.00      0.00      0.00         1
          7       1.00      0.67      0.80         3

avg / total       0.35      0.53      0.41        32


             precision    recall  f1-score   support

          1       0.00      0.00      0.00        28
          2       0.44      0.79      0.56        28
          3       0.14      0.33      0.20         6
          5       0.36      0.67      0.47         6
          6       0.00      0.00      0.00         3
          7       0.91      0.67      0.77        15

avg / total       0.34      0.44      0.36        86

//anaconda/lib/python3.5/site-packages/sklearn/metrics/classification.py:1135: UndefinedMetricWarning: Precision and F-score are ill-defined and being set to 0.0 in labels with no predicted samples.
  'precision', 'predicted', average, warn_for)

In [30]:
### svm.SVC()
from sklearn import cross_validation,svm
X_train,X_test,y_train,y_test = cross_validation.train_test_split(train_feature,train_label, 
                                              test_size=0.25, random_state=0,stratify=train_label)
clf=svm.SVC()
clf.fit(X_train,y_train)
print("Traing Score:%f"%clf.score(train_feature,train_label))
print("Testing Score:%f"%clf.score(test_feature,test_label))

y_predict = clf.predict(X_test)
print('\n'+classification_report(y_test,y_predict))

y_predict2 = clf.predict(test_feature)
print('\n'+classification_report(test_label,y_predict2))


Training Score:0.671875
Testing Score:0.697674

             precision    recall  f1-score   support

          1       0.60      0.82      0.69        11
          2       0.67      0.67      0.67        12
          3       0.00      0.00      0.00         3
          5       1.00      1.00      1.00         2
          6       0.00      0.00      0.00         1
          7       1.00      1.00      1.00         3

avg / total       0.61      0.69      0.64        32


             precision    recall  f1-score   support

          1       0.78      0.75      0.76        28
          2       0.56      0.89      0.68        28
          3       0.00      0.00      0.00         6
          5       1.00      0.67      0.80         6
          6       0.00      0.00      0.00         3
          7       1.00      0.67      0.80        15

avg / total       0.68      0.70      0.67        86

//anaconda/lib/python3.5/site-packages/sklearn/metrics/classification.py:1135: UndefinedMetricWarning: Precision and F-score are ill-defined and being set to 0.0 in labels with no predicted samples.
  'precision', 'predicted', average, warn_for)

In [31]:
### ensemble.AdaBoostClassifier()
from sklearn import cross_validation,ensemble
X_train,X_test,y_train,y_test = cross_validation.train_test_split(train_feature,train_label, 
                                              test_size=0.25, random_state=0,stratify=train_label) # stratified sampling
clf=ensemble.AdaBoostClassifier()
clf.fit(X_train,y_train)
print("Traing Score:%f"%clf.score(train_feature,train_label))
print("Testing Score:%f"%clf.score(test_feature,test_label))

y_predict = clf.predict(X_test)
print('\n'+classification_report(y_test,y_predict))

y_predict2 = clf.predict(test_feature)
print('\n'+classification_report(test_label,y_predict2))


Training Score:0.484375
Testing Score:0.418605

             precision    recall  f1-score   support

          1       0.00      0.00      0.00        11
          2       0.43      1.00      0.60        12
          3       0.00      0.00      0.00         3
          5       0.00      0.00      0.00         2
          6       1.00      1.00      1.00         1
          7       1.00      0.67      0.80         3

avg / total       0.29      0.47      0.33        32


             precision    recall  f1-score   support

          1       0.00      0.00      0.00        28
          2       0.40      0.93      0.56        28
          3       0.00      0.00      0.00         6
          5       0.00      0.00      0.00         6
          6       0.43      1.00      0.60         3
          7       1.00      0.47      0.64        15

avg / total       0.32      0.42      0.31        86

//anaconda/lib/python3.5/site-packages/sklearn/metrics/classification.py:1135: UndefinedMetricWarning: Precision and F-score are ill-defined and being set to 0.0 in labels with no predicted samples.
  'precision', 'predicted', average, warn_for)

In [32]:
### ensemble.GradientBoostingClassifier()
from sklearn import cross_validation,ensemble
X_train,X_test,y_train,y_test = cross_validation.train_test_split(train_feature,train_label, 
                                              test_size=0.25, random_state=0,stratify=train_label) # stratified sampling
clf=ensemble.GradientBoostingClassifier()
clf.fit(X_train,y_train)
print("Traing Score:%f"%clf.score(train_feature,train_label))
print("Testing Score:%f"%clf.score(test_feature,test_label))

y_predict = clf.predict(X_test)
print('\n'+classification_report(y_test,y_predict))

y_predict2 = clf.predict(test_feature)
print('\n'+classification_report(test_label,y_predict2))


Training Score:0.929688
Testing Score:0.697674

             precision    recall  f1-score   support

          1       0.59      0.91      0.71        11
          2       0.78      0.58      0.67        12
          3       0.00      0.00      0.00         3
          5       1.00      1.00      1.00         2
          6       1.00      1.00      1.00         1
          7       1.00      1.00      1.00         3

avg / total       0.68      0.72      0.68        32


             precision    recall  f1-score   support

          1       0.81      0.79      0.80        28
          2       0.67      0.71      0.69        28
          3       0.50      0.17      0.25         6
          5       0.40      0.67      0.50         6
          6       0.40      0.67      0.50         3
          7       0.92      0.73      0.81        15

avg / total       0.72      0.70      0.70        86

//anaconda/lib/python3.5/site-packages/sklearn/metrics/classification.py:1135: UndefinedMetricWarning: Precision and F-score are ill-defined and being set to 0.0 in labels with no predicted samples.
  'precision', 'predicted', average, warn_for)

In [104]:
### ensemble.RandomForestClassifier()
from sklearn import cross_validation,ensemble
X_train,X_test,y_train,y_test = cross_validation.train_test_split(train_feature,train_label, 
                                              test_size=0.25, random_state=0,stratify=train_label) # stratified sampling
clf=ensemble.RandomForestClassifier()
clf.fit(X_train,y_train)
print("Traing Score:%f"%clf.score(train_feature,train_label))
print("Testing Score:%f"%clf.score(test_feature,test_label))

y_predict = clf.predict(X_test)
print('\n'+classification_report(y_test,y_predict))

y_predict2 = clf.predict(test_feature)
print('\n'+classification_report(test_label,y_predict2))


Training Score:0.929688
Testing Score:0.627907

             precision    recall  f1-score   support

          1       0.59      0.91      0.71        11
          2       1.00      0.67      0.80        12
          3       0.00      0.00      0.00         3
          5       1.00      1.00      1.00         2
          6       1.00      1.00      1.00         1
          7       1.00      1.00      1.00         3

avg / total       0.76      0.75      0.73        32


             precision    recall  f1-score   support

          1       0.63      0.61      0.62        28
          2       0.55      0.64      0.59        28
          3       0.40      0.33      0.36         6
          5       0.67      0.67      0.67         6
          6       0.50      0.33      0.40         3
          7       0.92      0.80      0.86        15

avg / total       0.64      0.63      0.63        86
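
RandomForestClassifier above runs with its defaults (n_estimators=10 in scikit-learn of this era). A hedged sketch of a small grid search; the parameter grid is an illustrative assumption, not a tuned result:

from sklearn.model_selection import GridSearchCV
from sklearn.ensemble import RandomForestClassifier

param_grid = {'n_estimators': [50, 100, 200], 'max_depth': [None, 5, 10]}
search = GridSearchCV(RandomForestClassifier(random_state=0), param_grid, cv=5)
search.fit(train_feature, train_label)
print(search.best_params_, search.best_score_)  # best settings by 5-fold CV accuracy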


In [76]:
# XGBClassifier()
from xgboost import XGBClassifier
X_train,X_test,y_train,y_test = cross_validation.train_test_split(train_feature,train_label, 
                                              test_size=0.25, random_state=0,stratify=train_label) # stratified sampling
clf=XGBClassifier()
clf.fit(X_train,y_train)
print("Traing Score:%f"%clf.score(train_feature,train_label))
print("Testing Score:%f"%clf.score(test_feature,test_label))

y_predict = clf.predict(X_test)
print('\n'+classification_report(y_test,y_predict))

y_predict2 = clf.predict(test_feature)
print('\n'+classification_report(test_label,y_predict2))


Training Score:0.937500
Testing Score:0.697674

             precision    recall  f1-score   support

          1       0.61      1.00      0.76        11
          2       0.88      0.58      0.70        12
          3       0.00      0.00      0.00         3
          5       1.00      1.00      1.00         2
          6       1.00      1.00      1.00         1
          7       1.00      1.00      1.00         3

avg / total       0.73      0.75      0.71        32


             precision    recall  f1-score   support

          1       0.76      0.79      0.77        28
          2       0.63      0.68      0.66        28
          3       0.50      0.17      0.25         6
          5       0.40      0.67      0.50         6
          6       1.00      0.67      0.80         3
          7       0.92      0.80      0.86        15

avg / total       0.71      0.70      0.69        86

//anaconda/lib/python3.5/site-packages/sklearn/metrics/classification.py:1135: UndefinedMetricWarning: Precision and F-score are ill-defined and being set to 0.0 in labels with no predicted samples.
  'precision', 'predicted', average, warn_for)

In [105]:
# confusion matrix (clf is the most recently fitted model, here the RandomForest from In [104])
prediction2 = clf.predict(test_feature)
prediction2_list = prediction2.reshape(-1).astype(int)
label2_list = test_label.astype(int)

print(classification_report(label2_list, prediction2_list))
print(confusion_matrix(label2_list, prediction2_list))

# conf heatmap
conf = confusion_matrix(label2_list, prediction2_list)
f, ax= plt.subplots(figsize = (7, 6))
sns.heatmap(conf, annot=True, ax=ax, fmt='d', annot_kws={'size':20}) 
ax.xaxis.set_ticks_position('top') # putting the x-axis labels on top is common in textbooks
plt.show()


             precision    recall  f1-score   support

          1       0.63      0.61      0.62        28
          2       0.55      0.64      0.59        28
          3       0.40      0.33      0.36         6
          5       0.67      0.67      0.67         6
          6       0.50      0.33      0.40         3
          7       0.92      0.80      0.86        15

avg / total       0.64      0.63      0.63        86

[[17  8  3  0  0  0]
 [ 7 18  0  2  1  0]
 [ 2  2  2  0  0  0]
 [ 0  1  0  4  0  1]
 [ 0  2  0  0  1  0]
 [ 1  2  0  0  0 12]]

In [77]:
# confusion matrix (clf here is the XGBClassifier from In [76])
prediction2 = clf.predict(test_feature)
prediction2_list = prediction2.reshape(-1).astype(int)
label2_list = test_label.astype(int)

print(classification_report(label2_list, prediction2_list))
print(confusion_matrix(label2_list, prediction2_list))

# conf heatmap
conf = confusion_matrix(label2_list, prediction2_list)
f, ax= plt.subplots(figsize = (7, 6))
sns.heatmap(conf, annot=True, ax=ax, fmt='d', annot_kws={'size':20}) 
ax.xaxis.set_ticks_position('top') # putting the x-axis labels on top is common in textbooks
plt.show()


             precision    recall  f1-score   support

          1       0.76      0.79      0.77        28
          2       0.63      0.68      0.66        28
          3       0.50      0.17      0.25         6
          5       0.40      0.67      0.50         6
          6       1.00      0.67      0.80         3
          7       0.92      0.80      0.86        15

avg / total       0.71      0.70      0.69        86

[[22  6  0  0  0  0]
 [ 2 19  1  6  0  0]
 [ 4  1  1  0  0  0]
 [ 0  1  0  4  0  1]
 [ 0  1  0  0  2  0]
 [ 1  2  0  0  0 12]]

Keras: the MLP underperforms the ensemble methods


In [78]:
# Rescale features to [0, 1] (min-max scaling)
scaler = MinMaxScaler()
scaler.fit(train_feature)
train_feature_trans = scaler.transform(train_feature)
test_feature_trans = scaler.transform(test_feature)
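
The scaler is fitted on the training features only, so no test-set statistics leak into the transform. A one-line sanity check (sketch):

print(train_feature_trans.min(axis=0), train_feature_trans.max(axis=0))  # every training column should span exactly [0, 1]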

In [89]:
# One-hot encoding
from sklearn.preprocessing import OneHotEncoder
enc = OneHotEncoder()
enc.fit(train_label.reshape(-1,1))
train_label_OHE = enc.transform(train_label.reshape(-1,1)).toarray()
test_label_OHE = enc.transform(test_label.reshape(-1,1)).toarray()

In [90]:
train_label_OHE[0:5]


Out[90]:
array([[ 1.,  0.,  0.,  0.,  0.,  0.],
       [ 0.,  0.,  0.,  0.,  0.,  1.],
       [ 1.,  0.,  0.,  0.,  0.,  0.],
       [ 0.,  0.,  0.,  0.,  0.,  1.],
       [ 0.,  1.,  0.,  0.,  0.,  0.]])
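
OneHotEncoder orders its columns by the sorted unique label values (1, 2, 3, 5, 6, 7; there is no Type 4), so column indices can be mapped back to the original Type codes. A quick sketch:

classes = np.unique(train_label)                          # array([1, 2, 3, 5, 6, 7])
print(classes[np.argmax(train_label_OHE[0:5], axis=1)])   # recovers the labels shown above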

In [100]:
# Keras MLP models: categorical_clf
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import Dropout

def show_train_history(train_history,train,validation):
    plt.plot(train_history.history[train])
    plt.plot(train_history.history[validation])
    plt.title('Train History')
    plt.ylabel(train)
    plt.xlabel('Epoch')
    plt.legend(['train', 'validation'], loc='best')
    plt.show()

model = Sequential()
model.add(Dense(units=200,
                input_dim=9,
                kernel_initializer='uniform',
                activation='relu'))
model.add(Dropout(0.5))

model.add(Dense(units=200,
                kernel_initializer='uniform',
                activation='relu'))
model.add(Dropout(0.5))

model.add(Dense(units=6, # output layer width matches the 6-class one-hot encoding
                kernel_initializer='uniform',
                activation='softmax'))

print(model.summary()) # shows the model architecture and the parameter counts

model.compile(loss='categorical_crossentropy', # categorical for multi-class problems
              optimizer='adam', metrics=['accuracy'])

train_history = model.fit(x=train_feature_trans, y=train_label_OHE, # the extra validation split done by hand for sklearn is built into Keras
                          validation_split=0.8, epochs=500, # note: 0.8 holds out 80% for validation, leaving only 25 training samples
                          batch_size=2000, verbose=2) # verbose=2 prints one line per epoch

show_train_history(train_history,'acc','val_acc')
show_train_history(train_history,'loss','val_loss')

scores = model.evaluate(test_feature_trans, test_label_OHE)
print('\n')
print('accuracy=',scores[1])

prediction = model.predict_classes(test_feature_trans)


_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_33 (Dense)             (None, 200)               2000      
_________________________________________________________________
dropout_23 (Dropout)         (None, 200)               0         
_________________________________________________________________
dense_34 (Dense)             (None, 200)               40200     
_________________________________________________________________
dropout_24 (Dropout)         (None, 200)               0         
_________________________________________________________________
dense_35 (Dense)             (None, 6)                 1206      
=================================================================
Total params: 43,406
Trainable params: 43,406
Non-trainable params: 0
_________________________________________________________________
None
Train on 25 samples, validate on 103 samples
Epoch 1/500
0s - loss: 1.7885 - acc: 0.3600 - val_loss: 1.7882 - val_acc: 0.4272
Epoch 2/500
0s - loss: 1.7884 - acc: 0.2800 - val_loss: 1.7855 - val_acc: 0.4466
Epoch 3/500
0s - loss: 1.7850 - acc: 0.3200 - val_loss: 1.7821 - val_acc: 0.4563
...
Epoch 402/500
0s - loss: 0.2738 - acc: 0.9200 - val_loss: 1.3798 - val_acc: 0.5825
(per-epoch log abridged: training accuracy climbs to ~0.9 while validation accuracy plateaus around 0.55-0.60; validation loss bottoms out near 0.96 around epoch 160 and then climbs steadily, a clear overfit on the 25-sample training split. The source output breaks off at epoch 402.)
Epoch 403/500
0s - loss: 0.2232 - acc: 0.9200 - val_loss: 1.3778 - val_acc: 0.5825
Epoch 404/500
0s - loss: 0.2908 - acc: 0.9200 - val_loss: 1.3791 - val_acc: 0.5825
Epoch 405/500
0s - loss: 0.2844 - acc: 0.8800 - val_loss: 1.3825 - val_acc: 0.5825
Epoch 406/500
0s - loss: 0.2725 - acc: 0.8400 - val_loss: 1.3907 - val_acc: 0.5922
Epoch 407/500
0s - loss: 0.2570 - acc: 0.9200 - val_loss: 1.3985 - val_acc: 0.6019
Epoch 408/500
0s - loss: 0.3101 - acc: 0.9200 - val_loss: 1.4079 - val_acc: 0.6019
Epoch 409/500
0s - loss: 0.3123 - acc: 0.8800 - val_loss: 1.4171 - val_acc: 0.5922
Epoch 410/500
0s - loss: 0.3175 - acc: 0.9200 - val_loss: 1.4234 - val_acc: 0.6019
Epoch 411/500
0s - loss: 0.3294 - acc: 0.8400 - val_loss: 1.4312 - val_acc: 0.6019
Epoch 412/500
0s - loss: 0.1816 - acc: 0.9200 - val_loss: 1.4373 - val_acc: 0.5825
Epoch 413/500
0s - loss: 0.2908 - acc: 0.8800 - val_loss: 1.4317 - val_acc: 0.6019
Epoch 414/500
0s - loss: 0.2682 - acc: 0.8800 - val_loss: 1.4247 - val_acc: 0.6019
Epoch 415/500
0s - loss: 0.3035 - acc: 0.8800 - val_loss: 1.4109 - val_acc: 0.5922
Epoch 416/500
0s - loss: 0.3273 - acc: 0.9200 - val_loss: 1.3923 - val_acc: 0.5922
Epoch 417/500
0s - loss: 0.2627 - acc: 0.9200 - val_loss: 1.3799 - val_acc: 0.5825
Epoch 418/500
0s - loss: 0.2203 - acc: 0.9200 - val_loss: 1.3714 - val_acc: 0.5825
Epoch 419/500
0s - loss: 0.2680 - acc: 0.9200 - val_loss: 1.3679 - val_acc: 0.5825
Epoch 420/500
0s - loss: 0.1742 - acc: 0.9600 - val_loss: 1.3680 - val_acc: 0.5825
Epoch 421/500
0s - loss: 0.2217 - acc: 0.9600 - val_loss: 1.3715 - val_acc: 0.5825
Epoch 422/500
0s - loss: 0.2098 - acc: 0.9600 - val_loss: 1.3778 - val_acc: 0.5825
Epoch 423/500
0s - loss: 0.2415 - acc: 0.9200 - val_loss: 1.3885 - val_acc: 0.5825
Epoch 424/500
0s - loss: 0.3734 - acc: 0.8400 - val_loss: 1.4055 - val_acc: 0.5922
Epoch 425/500
0s - loss: 0.1991 - acc: 0.9600 - val_loss: 1.4226 - val_acc: 0.5922
Epoch 426/500
0s - loss: 0.3547 - acc: 0.8400 - val_loss: 1.4375 - val_acc: 0.5922
Epoch 427/500
0s - loss: 0.2388 - acc: 0.9200 - val_loss: 1.4507 - val_acc: 0.5922
Epoch 428/500
0s - loss: 0.2185 - acc: 0.9200 - val_loss: 1.4597 - val_acc: 0.5922
Epoch 429/500
0s - loss: 0.2005 - acc: 0.9600 - val_loss: 1.4608 - val_acc: 0.5922
Epoch 430/500
0s - loss: 0.2727 - acc: 0.8800 - val_loss: 1.4647 - val_acc: 0.5922
Epoch 431/500
0s - loss: 0.3150 - acc: 0.9200 - val_loss: 1.4600 - val_acc: 0.5922
Epoch 432/500
0s - loss: 0.2764 - acc: 0.8800 - val_loss: 1.4476 - val_acc: 0.5922
Epoch 433/500
0s - loss: 0.2426 - acc: 0.9200 - val_loss: 1.4352 - val_acc: 0.5922
Epoch 434/500
0s - loss: 0.3155 - acc: 0.8800 - val_loss: 1.4310 - val_acc: 0.5825
Epoch 435/500
0s - loss: 0.1904 - acc: 0.9600 - val_loss: 1.4318 - val_acc: 0.5825
Epoch 436/500
0s - loss: 0.1864 - acc: 0.9600 - val_loss: 1.4332 - val_acc: 0.5825
Epoch 437/500
0s - loss: 0.2087 - acc: 0.9200 - val_loss: 1.4341 - val_acc: 0.5825
Epoch 438/500
0s - loss: 0.1966 - acc: 0.9600 - val_loss: 1.4367 - val_acc: 0.5825
Epoch 439/500
0s - loss: 0.2491 - acc: 0.8800 - val_loss: 1.4411 - val_acc: 0.5825
Epoch 440/500
0s - loss: 0.1675 - acc: 0.9600 - val_loss: 1.4465 - val_acc: 0.5825
Epoch 441/500
0s - loss: 0.2221 - acc: 0.9200 - val_loss: 1.4547 - val_acc: 0.5825
Epoch 442/500
0s - loss: 0.2240 - acc: 0.9200 - val_loss: 1.4654 - val_acc: 0.5825
Epoch 443/500
0s - loss: 0.1524 - acc: 0.9600 - val_loss: 1.4807 - val_acc: 0.5922
Epoch 444/500
0s - loss: 0.2019 - acc: 0.9600 - val_loss: 1.5044 - val_acc: 0.5922
Epoch 445/500
0s - loss: 0.1830 - acc: 0.9600 - val_loss: 1.5268 - val_acc: 0.6019
Epoch 446/500
0s - loss: 0.1672 - acc: 0.9600 - val_loss: 1.5510 - val_acc: 0.5825
Epoch 447/500
0s - loss: 0.2374 - acc: 0.9200 - val_loss: 1.5634 - val_acc: 0.5825
Epoch 448/500
0s - loss: 0.1231 - acc: 0.9600 - val_loss: 1.5761 - val_acc: 0.5631
Epoch 449/500
0s - loss: 0.2135 - acc: 0.9600 - val_loss: 1.5881 - val_acc: 0.5728
Epoch 450/500
0s - loss: 0.2357 - acc: 0.9200 - val_loss: 1.5843 - val_acc: 0.5631
Epoch 451/500
0s - loss: 0.2753 - acc: 0.8800 - val_loss: 1.5762 - val_acc: 0.5825
Epoch 452/500
0s - loss: 0.2663 - acc: 0.9200 - val_loss: 1.5655 - val_acc: 0.5825
Epoch 453/500
0s - loss: 0.1552 - acc: 0.9600 - val_loss: 1.5569 - val_acc: 0.6019
Epoch 454/500
0s - loss: 0.1931 - acc: 0.9200 - val_loss: 1.5451 - val_acc: 0.5922
Epoch 455/500
0s - loss: 0.2066 - acc: 0.9200 - val_loss: 1.5323 - val_acc: 0.6019
Epoch 456/500
0s - loss: 0.1872 - acc: 0.9600 - val_loss: 1.5301 - val_acc: 0.5922
Epoch 457/500
0s - loss: 0.2541 - acc: 0.8800 - val_loss: 1.5246 - val_acc: 0.5825
Epoch 458/500
0s - loss: 0.2375 - acc: 0.9200 - val_loss: 1.5188 - val_acc: 0.5825
Epoch 459/500
0s - loss: 0.1748 - acc: 0.9200 - val_loss: 1.5190 - val_acc: 0.5825
Epoch 460/500
0s - loss: 0.1704 - acc: 0.9600 - val_loss: 1.5213 - val_acc: 0.5825
Epoch 461/500
0s - loss: 0.2258 - acc: 0.9200 - val_loss: 1.5261 - val_acc: 0.5825
Epoch 462/500
0s - loss: 0.1946 - acc: 0.9600 - val_loss: 1.5314 - val_acc: 0.5825
Epoch 463/500
0s - loss: 0.3055 - acc: 0.8800 - val_loss: 1.5347 - val_acc: 0.5825
Epoch 464/500
0s - loss: 0.1835 - acc: 0.9600 - val_loss: 1.5396 - val_acc: 0.5825
Epoch 465/500
0s - loss: 0.1842 - acc: 0.9600 - val_loss: 1.5478 - val_acc: 0.5825
Epoch 466/500
0s - loss: 0.2595 - acc: 0.9200 - val_loss: 1.5591 - val_acc: 0.5922
Epoch 467/500
0s - loss: 0.1696 - acc: 0.9600 - val_loss: 1.5711 - val_acc: 0.6019
Epoch 468/500
0s - loss: 0.2105 - acc: 0.9200 - val_loss: 1.5807 - val_acc: 0.6019
Epoch 469/500
0s - loss: 0.2162 - acc: 0.8800 - val_loss: 1.5894 - val_acc: 0.6019
Epoch 470/500
0s - loss: 0.2823 - acc: 0.8800 - val_loss: 1.5946 - val_acc: 0.5922
Epoch 471/500
0s - loss: 0.3182 - acc: 0.9200 - val_loss: 1.5856 - val_acc: 0.5922
Epoch 472/500
0s - loss: 0.2596 - acc: 0.8800 - val_loss: 1.5764 - val_acc: 0.5922
Epoch 473/500
0s - loss: 0.2835 - acc: 0.9200 - val_loss: 1.5712 - val_acc: 0.5922
Epoch 474/500
0s - loss: 0.2448 - acc: 0.9200 - val_loss: 1.5643 - val_acc: 0.5825
Epoch 475/500
0s - loss: 0.2464 - acc: 0.9200 - val_loss: 1.5630 - val_acc: 0.5825
Epoch 476/500
0s - loss: 0.2075 - acc: 0.9600 - val_loss: 1.5622 - val_acc: 0.5825
Epoch 477/500
0s - loss: 0.2354 - acc: 0.9200 - val_loss: 1.5631 - val_acc: 0.5825
Epoch 478/500
0s - loss: 0.2567 - acc: 0.8800 - val_loss: 1.5651 - val_acc: 0.5825
Epoch 479/500
0s - loss: 0.2095 - acc: 0.9200 - val_loss: 1.5662 - val_acc: 0.5825
Epoch 480/500
0s - loss: 0.2655 - acc: 0.8800 - val_loss: 1.5672 - val_acc: 0.5825
Epoch 481/500
0s - loss: 0.3013 - acc: 0.8400 - val_loss: 1.5680 - val_acc: 0.5825
Epoch 482/500
0s - loss: 0.1499 - acc: 0.9600 - val_loss: 1.5722 - val_acc: 0.5922
Epoch 483/500
0s - loss: 0.1390 - acc: 0.9600 - val_loss: 1.5782 - val_acc: 0.5922
Epoch 484/500
0s - loss: 0.2972 - acc: 0.8800 - val_loss: 1.5926 - val_acc: 0.6019
Epoch 485/500
0s - loss: 0.2610 - acc: 0.8800 - val_loss: 1.5977 - val_acc: 0.6019
Epoch 486/500
0s - loss: 0.1922 - acc: 0.9200 - val_loss: 1.6009 - val_acc: 0.6019
Epoch 487/500
0s - loss: 0.2304 - acc: 0.9200 - val_loss: 1.6039 - val_acc: 0.6019
Epoch 488/500
0s - loss: 0.1960 - acc: 0.9200 - val_loss: 1.6079 - val_acc: 0.5922
Epoch 489/500
0s - loss: 0.1888 - acc: 0.9600 - val_loss: 1.6034 - val_acc: 0.5922
Epoch 490/500
0s - loss: 0.1657 - acc: 0.9600 - val_loss: 1.6002 - val_acc: 0.6019
Epoch 491/500
0s - loss: 0.2653 - acc: 0.9200 - val_loss: 1.5951 - val_acc: 0.5922
Epoch 492/500
0s - loss: 0.1923 - acc: 0.9200 - val_loss: 1.5933 - val_acc: 0.5922
Epoch 493/500
0s - loss: 0.1867 - acc: 0.9600 - val_loss: 1.5912 - val_acc: 0.5922
Epoch 494/500
0s - loss: 0.2367 - acc: 0.8800 - val_loss: 1.5850 - val_acc: 0.5922
Epoch 495/500
0s - loss: 0.2547 - acc: 0.9200 - val_loss: 1.5797 - val_acc: 0.5922
Epoch 496/500
0s - loss: 0.1750 - acc: 0.9600 - val_loss: 1.5771 - val_acc: 0.5922
Epoch 497/500
0s - loss: 0.3278 - acc: 0.8800 - val_loss: 1.5768 - val_acc: 0.5922
Epoch 498/500
0s - loss: 0.1529 - acc: 0.9600 - val_loss: 1.5794 - val_acc: 0.5922
Epoch 499/500
0s - loss: 0.1739 - acc: 0.9600 - val_loss: 1.5846 - val_acc: 0.5922
Epoch 500/500
0s - loss: 0.2231 - acc: 0.9200 - val_loss: 1.5923 - val_acc: 0.5922
accuracy= 0.662790697674
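
The log above is a textbook overfitting curve: training accuracy climbs past 0.9 while val_loss rises steadily after roughly epoch 320, and the final hold-out accuracy lands at only ~0.66. Below is a minimal sketch of how an EarlyStopping callback could cut training off near the val_loss minimum instead of running all 500 epochs; it assumes the same model and data arrays used for the fit above (train_feature, train_label_onehot, and the validation split are illustrative names, not taken from this notebook).

In [ ]:
from keras.callbacks import EarlyStopping

# Stop once val_loss has failed to improve for `patience` consecutive
# epochs, rather than training blindly to the epoch limit.
early_stop = EarlyStopping(monitor='val_loss', patience=20, mode='min')

# Hypothetical re-run of the fit above with the callback attached:
# model.fit(train_feature, train_label_onehot,
#           validation_split=0.3, epochs=500, verbose=2,
#           callbacks=[early_stop])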
In [101]:
# Train/Test score: pair the true test labels with the predictions; this feeds the confusion matrix below
df_ans = pd.DataFrame({'Real Class' :test_label})
df_ans['Prediction'] = prediction
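
With real and predicted labels side by side, df_ans makes a quick sanity check easy before the full confusion matrix. A small sketch, using only the df_ans built above:

In [ ]:
# Count how many test samples the network got right, and eyeball a few rows.
n_correct = (df_ans['Real Class'] == df_ans['Prediction']).sum()
print('correct predictions: %d / %d' % (n_correct, len(df_ans)))
print(df_ans.head(10))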

In [103]:
# confusion matrix
prediction2_list = prediction.reshape(-1).astype(int)  # flatten predictions to a 1-D int array
label2_list = test_label.astype(int)

print(classification_report(label2_list, prediction2_list))
print(confusion_matrix(label2_list, prediction2_list))

# confusion-matrix heatmap
conf = confusion_matrix(label2_list, prediction2_list)
f, ax = plt.subplots(figsize=(7, 7))
sns.heatmap(conf, annot=True, ax=ax, fmt='d', annot_kws={'size': 20})  # annotation font size
ax.xaxis.set_ticks_position('top')  # putting the x tick labels on top is common in textbooks
plt.show()


//anaconda/lib/python3.5/site-packages/sklearn/metrics/classification.py:1135: UndefinedMetricWarning: Precision and F-score are ill-defined and being set to 0.0 in labels with no predicted samples.
  'precision', 'predicted', average, warn_for)
//anaconda/lib/python3.5/site-packages/sklearn/metrics/classification.py:1137: UndefinedMetricWarning: Recall and F-score are ill-defined and being set to 0.0 in labels with no true samples.
  'recall', 'true', average, warn_for)
             precision    recall  f1-score   support

          0       0.00      0.00      0.00         0
          1       0.17      0.18      0.18        28
          2       0.00      0.00      0.00        28
          3       0.00      0.00      0.00         6
          4       0.00      0.00      0.00         0
          5       0.13      0.33      0.19         6
          6       0.00      0.00      0.00         3
          7       0.00      0.00      0.00        15

avg / total       0.07      0.08      0.07        86

[[ 0  0  0  0  0  0  0  0]
 [23  5  0  0  0  0  0  0]
 [ 4 20  0  2  1  1  0  0]
 [ 5  1  0  0  0  0  0  0]
 [ 0  0  0  0  0  0  0  0]
 [ 0  2  0  1  1  2  0  0]
 [ 0  1  0  1  1  0  0  0]
 [ 1  0  0  1  1 12  0  0]]
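
The zero-support rows for classes 0 and 4 (and the UndefinedMetricWarning above) are an artifact of the label encoding: the glass types are {1, 2, 3, 5, 6, 7}, with no Type 4, but the one-hot target evidently kept eight slots (0-7), so the softmax argmax can land on the two unused classes. A sketch of the root-cause fix, re-encoding Type to a dense 0..5 range before one-hot encoding so the network has exactly six outputs (variable names are illustrative):

In [ ]:
# LabelEncoder maps the six observed types {1,2,3,5,6,7} onto 0..5,
# so a 6-unit softmax covers exactly the classes that exist.
le = LabelEncoder()
y_enc = le.fit_transform(df_label)

# ... train a 6-output network on the one-hot version of y_enc ...

# After predicting, map the 0..5 indices back to the original Type codes:
# pred_types = le.inverse_transform(pred_enc)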

In [ ]: