In [62]:
#version vx1

## new to this version
# keras.layers.merge.Concatenate(axis=-1)

## already in code
# 11 selected features
# train entries with STD > 3 removed

## already tried
#

## other options
# optimizers, batch size
# dropout layers, batchnorm, dense layer count / neurons per layer

## future
# remove columns with < 0.2 variance (see the sketch in the next cell)
# try individual features first, then combinations
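
In [ ]:
# Hedged sketch of the low-variance filter from the "future" note above, using
# sklearn's VarianceThreshold (assumes a numeric, NaN-free frame; the 0.2 cutoff is
# the figure from the note, and variance is scale-dependent, so treat it as illustrative).
from sklearn.feature_selection import VarianceThreshold

def drop_low_variance(df, threshold=0.2):
    selector = VarianceThreshold(threshold=threshold)
    selector.fit(df.values)
    return df[df.columns[selector.get_support()]]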

In [63]:
# leaderboard score log (see the RMSLE sketch in the next cell)
#0.40094 <-
#0.39138 <- real, elim std>3, 12-300-30-10-1, test_vx1, epoch 60/200: loss 0.3677, val_loss 0.2493
#0.40566 <- real, elim std>3, 12-300-30-10-1, test_v1_ext, epoch 72/200: loss 0.5543, val_loss 0.2496
#0.48500 <- elim std>3, 12-300-30-10-1
#0.52512 <- epoch 11/200, 16s: loss 0.4993, val_loss 0.3509
#0.38798 <- 12-300-30-5-1
#0.41785 <- 9 col grp1: loss 0.3123, acc 0.0000e+00, val_loss 0.2602
#0.43247 <- 3 col
#0.53085 <- adadelta
#0.50647 <- decay
#0.48978 <- rmsle

# tasks
# feature extraction
# feature selection
# optimizer selection, lr, decay
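
In [ ]:
# The scores above are leaderboard numbers; the metric is assumed to be RMSLE
# (the 'rmsle' note above refers to it). Keras's mean_squared_logarithmic_error
# is the square of this quantity, so sqrt(val_loss) is the validation-set RMSLE.
import numpy as np

def rmsle(y_true, y_pred):
    return np.sqrt(np.mean((np.log1p(y_pred) - np.log1p(y_true)) ** 2))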

In [16]:
### importing libraries
%matplotlib inline
from keras.layers.convolutional import Convolution2D, MaxPooling2D, Convolution1D
from keras.layers.core import Dense, Activation, Dropout, Flatten
from keras.models import Sequential
from keras.optimizers import SGD, RMSprop
from keras.datasets import mnist
from keras.layers import Concatenate, Merge  # Merge is deprecated in Keras 2 (warning appears below)
from keras.layers.normalization import BatchNormalization
from keras.callbacks import EarlyStopping

import numpy as np
import pandas as pd
from keras.wrappers.scikit_learn import KerasRegressor
from sklearn.model_selection import cross_val_score
from sklearn.model_selection import KFold
from sklearn.preprocessing import StandardScaler
from sklearn.pipeline import Pipeline
from sklearn import preprocessing

import matplotlib as mpl

#mpl.use('Agg')
import matplotlib.pyplot as plt
import time
timestr = time.strftime("%Y%m%d-%H%M%S")
print(timestr)


20170504-223227

In [4]:
def load_data():
    '''Read the train/test/macro csv files with parsed timestamps.'''
    df_train = pd.read_csv("../input/train.csv", parse_dates=['timestamp'])
    df_test = pd.read_csv("../input/test.csv", parse_dates=['timestamp'])
    df_macro = pd.read_csv("../input/macro.csv", parse_dates=['timestamp'])
    return [df_train, df_test, df_macro]
#df_train.head()

In [6]:
#print( df_train.shape)

In [7]:
#selected features

gr1 = ["full_sq", "life_sq", "floor", "max_floor", "material",
       "build_year", "num_room", "kitch_sq", "state", "radiation_km",
       "green_zone_km", "railroad_km", "public_transport_station_km", "metro_min_avto"]
#"kindergarden_km"
#"sub_area" is dtype object (str)

# single-feature trials; each reassignment overwrites gr1 above
gr1 = ["full_sq"]  # elbow 10, epoch 26/200: loss 0.3472, val_loss 0.2767
gr1 = ["life_sq"]  # elbow 20, epoch 163/200: loss 0.4144, val_loss 0.3150
gr1 = ["floor"]    # elbow 55, epoch 83/200: loss 0.4261, val_loss 0.3554


gr1 = list(set(gr1))
print(len(gr1))
for c in gr1:
    print(c + "\t" + str(df_train[c].dtype))  # needs df_train from load_data() first (hence the NameError below)


1
---------------------------------------------------------------------------
NameError                                 Traceback (most recent call last)
<ipython-input-7-9711c9ffc3dd> in <module>()
     15 print(len(gr1))
     16 for c in gr1:
---> 17     print (c+"\t"+str(df_train[c].dtype))

NameError: name 'df_train' is not defined

In [8]:
# This section added: drop crazy data points
def drop_crazy_data_point(clist, df_train):
    '''Drop rows more than 3 standard deviations from the column mean.
    clist = list of columns to check
    df_train = pandas DataFrame
    '''
    for cname in clist:
        # per-column drop: each removal shifts the mean/std seen by later columns
        df_train.drop(df_train[np.abs((df_train[cname] - df_train[cname].mean()) / df_train[cname].std()) > 3].index, inplace=True)
    print('shape after drop_crazy_data_point:')
    print(df_train.shape)
    return df_train
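
In [ ]:
# Alternative sketch for drop_crazy_data_point: compute all z-scores against the
# original column statistics in one pass. The loop above drops per column, so each
# later column is measured against an already-shrunken mean/std; this version is
# order-independent. NaN comparisons are False, so rows with missing values are
# kept, matching the loop version.
def drop_outliers_vectorized(clist, df, z=3):
    zscores = (df[clist] - df[clist].mean()) / df[clist].std()
    return df[~(zscores.abs() > z).any(axis=1)]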

In [9]:
def take_ytrain_testid_traincnt(df_train, df_test):
    '''Split off the target and test ids, then build the combined train+test
    feature frame. Note: df_macro comes from the enclosing (global) scope.'''
    y_train = df_train['price_doc'].values
    id_test = df_test['id']

    df_train.drop(['id', 'price_doc'], axis=1, inplace=True)
    df_test.drop(['id'], axis=1, inplace=True)

    num_train = len(df_train)
    df_all = pd.concat([df_train, df_test])
    df_all = df_all.join(df_macro, on='timestamp', rsuffix='_macro')
    print(df_all.shape)

    # Add month-year count
    month_year = (df_all.timestamp.dt.month + df_all.timestamp.dt.year * 100)
    month_year_cnt_map = month_year.value_counts().to_dict()
    df_all['month_year_cnt'] = month_year.map(month_year_cnt_map)

    # Add week-year count
    week_year = (df_all.timestamp.dt.weekofyear + df_all.timestamp.dt.year * 100)
    week_year_cnt_map = week_year.value_counts().to_dict()
    df_all['week_year_cnt'] = week_year.map(week_year_cnt_map)

    # Add month and day-of-week
    df_all['month'] = df_all.timestamp.dt.month
    df_all['dow'] = df_all.timestamp.dt.dayofweek

    # Other feature engineering (disabled)
    #df_all['rel_floor'] = df_all['floor'] / df_all['max_floor'].astype(float)
    #df_all['rel_kitch_sq'] = df_all['kitch_sq'] / df_all['full_sq'].astype(float)

    # Remove timestamp columns (they may let the model overfit the training period)
    df_all.drop(['timestamp', 'timestamp_macro'], axis=1, inplace=True)

    return [y_train, id_test, num_train, df_all]
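
In [ ]:
# Note: Series.dt.weekofyear used above is deprecated in newer pandas (removed in 2.0).
# The equivalent with the isocalendar() API would be:
#   week_year = (df_all.timestamp.dt.isocalendar().week + df_all.timestamp.dt.year * 100)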

In [53]:
def factor(df_all, num_train, gr1, gr2):
    '''Integer-encode object columns, impute NaNs with column means, and slice
    the two feature groups back into train/test arrays.'''
    factorize = lambda t: pd.factorize(t[1])[0]

    df_obj = df_all.select_dtypes(include=['object'])

    # full factorized matrix: only its shape is used below
    X_all = np.c_[
        df_all.select_dtypes(exclude=['object']).values,
        np.array(list(map(factorize, df_obj.iteritems()))).T
    ]
    print(X_all.shape)

    X_train = X_all[:num_train]
    X_test = X_all[num_train:]

    # Deal with categorical values
    df_numeric = df_all.select_dtypes(exclude=['object'])
    df_obj = df_all.select_dtypes(include=['object']).copy()

    for c in df_obj:
        df_obj[c] = pd.factorize(df_obj[c])[0]

    df_values = pd.concat([df_numeric, df_obj], axis=1)

    # impute missing values with column means, then drop all-NaN columns
    df_values = df_values.fillna(df_values.mean())
    df_values = df_values.dropna(axis="columns", how='all')

    #df_values.drop(['area_m','ID_metro'], axis=1, inplace=True)

    # slice out the two feature groups and split back into train/test
    df_values1 = df_values[gr1]
    df_values2 = df_values[gr2]

    X_all1 = df_values1.values
    X_all2 = df_values2.values

    X_train1 = X_all1[:num_train]
    X_train2 = X_all2[:num_train]

    X_test1 = X_all1[num_train:]
    X_test2 = X_all2[num_train:]

    return [X_train1, X_train2, X_test1, X_test2]
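
In [ ]:
# What pd.factorize does inside factor(): integer-encode an object column. Running it
# on the combined train+test frame keeps the codes consistent across the split. Toy example:
import pandas as pd
codes, uniques = pd.factorize(pd.Series(['brick', 'panel', 'brick', None]))
print(codes)    # [ 0  1  0 -1]  -> missing values become -1
print(uniques)  # Index(['brick', 'panel'], dtype='object')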

In [73]:
#for c in df_values.columns:
#    if df_values[c].dtype == 'object':
#        lbl = preprocessing.LabelEncoder()
#        lbl.fit(list(df_values[c].values)) 
#        df_values[c] = lbl.transform(list(df_values[c].values))

In [74]:
#c = df_values.columns[4]
#df_values[c]
#c

In [76]:
#X_all.tofile("x_all.csv",format="%s",sep=",")

In [77]:
#df_valuesclean=df_values.dropna(axis="columns", how='any')
#df_valuesclean.shape

In [140]:
from keras.layers.merge import Concatenate  # imported for reference; Merge below is what is actually used
# define base model
def baseline_model(df_values1_cols, df_values2_cols):
    # branch 1
    model1 = Sequential()
    model1.add(Dense(30, input_dim=df_values1_cols, activation='relu'))
    #model1.add(BatchNormalization())
    #model1.add(Dropout(0.5))
    #model1.add(Dense(60,activation="relu"))
    #model1.add(Dropout(0.25))
    model1.add(Dense(15, activation="relu"))
    #model1.add(Dropout(0.25))

    # branch 2
    model2 = Sequential()
    model2.add(Dense(30, input_dim=df_values2_cols, activation='relu'))
    #model2.add(BatchNormalization())
    #model2.add(Dropout(0.5))
    #model2.add(Dense(60,activation="relu"))
    #model2.add(Dropout(0.5))
    model2.add(Dense(15, activation="relu"))
    #model2.add(Dropout(0.25))

    # merged head: concatenate the two branches (Merge is deprecated; see the functional-API sketch below)
    model = Sequential()
    #model.add(Concatenate([model1, model2],input_shape=(120,1)))
    model.add(Merge([model1, model2], mode='concat'))
    model.add(Dense(100, activation="relu"))
    #model.add(Dropout(0.25))
    model.add(Dense(30, activation="relu"))
    model.add(Dense(1, activation="relu"))
    # Compile model (alternatives tried: SGD, Adadelta)
    #sgd=SGD(lr=0.01, momentum=0.0, decay=0.0, nesterov=False)
    model.compile(loss='mean_squared_logarithmic_error', optimizer=RMSprop(decay=0.0001))
    return [model1, model2, model]
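
In [ ]:
# The same two-branch architecture sketched with the functional API, which the
# deprecation warning below recommends over Sequential + Merge (assumes Keras 2.x;
# layer sizes copied from baseline_model above).
from keras.layers import Input, Dense, concatenate
from keras.models import Model
from keras.optimizers import RMSprop

def baseline_model_functional(n_cols1, n_cols2):
    in1 = Input(shape=(n_cols1,))
    in2 = Input(shape=(n_cols2,))
    b1 = Dense(15, activation='relu')(Dense(30, activation='relu')(in1))
    b2 = Dense(15, activation='relu')(Dense(30, activation='relu')(in2))
    x = concatenate([b1, b2])              # replaces Merge(mode='concat')
    x = Dense(100, activation='relu')(x)
    x = Dense(30, activation='relu')(x)
    out = Dense(1, activation='relu')(x)
    model = Model(inputs=[in1, in2], outputs=out)
    model.compile(loss='mean_squared_logarithmic_error', optimizer=RMSprop(decay=0.0001))
    return model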

In [89]:
[df_train,df_test,df_macro]=load_data()

In [121]:
[model1,model2,model]=baseline_model(15,15)
model1.summary()
model2.summary()
model.summary()
# note: the summaries below were produced by an earlier, larger (300-unit) version of the model


_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_218 (Dense)            (None, 300)               4800      
_________________________________________________________________
batch_normalization_2 (Batch (None, 300)               1200      
_________________________________________________________________
dropout_155 (Dropout)        (None, 300)               0         
_________________________________________________________________
dense_219 (Dense)            (None, 60)                18060     
_________________________________________________________________
batch_normalization_3 (Batch (None, 60)                240       
=================================================================
Total params: 24,300
Trainable params: 23,580
Non-trainable params: 720
_________________________________________________________________
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_220 (Dense)            (None, 300)               4800      
_________________________________________________________________
dropout_156 (Dropout)        (None, 300)               0         
_________________________________________________________________
dense_221 (Dense)            (None, 60)                18060     
=================================================================
Total params: 22,860
Trainable params: 22,860
Non-trainable params: 0
_________________________________________________________________
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
merge_26 (Merge)             (None, 120)               0         
_________________________________________________________________
dense_222 (Dense)            (None, 30)                3630      
_________________________________________________________________
dense_223 (Dense)            (None, 1)                 31        
=================================================================
Total params: 50,821
Trainable params: 50,101
Non-trainable params: 720
_________________________________________________________________
/home/minesh/anaconda3/lib/python3.6/site-packages/ipykernel/__main__.py:25: UserWarning: The `Merge` layer is deprecated and will be removed after 08/2017. Use instead layers from `keras.layers.merge`, e.g. `add`, `concatenate`, etc.

In [133]:
def trainKeras(model, X_train1, X_train2, y_train):
    '''Fit the merged two-branch model. Early stopping is defined but currently disabled.'''
    #model.compile(loss='mean_squared_error', optimizer='sgd', metrics=['accuracy'])
    print("^^^INFO: Fit Model^^^")
    #X_train = X_train.reshape(X_train.shape[0],244,1)

    # pass callbacks=callbacks to fit() below to enable early stopping
    callbacks = [
        EarlyStopping(monitor='val_loss', patience=5, verbose=2)
    ]

    history = model.fit(x=[X_train1, X_train2], y=y_train, epochs=400, batch_size=780,
                        validation_split=0.3, verbose=2)  #,callbacks=callbacks
    return [model, history]

In [136]:
# list all data in history
'''
print(history.history.keys())
# summarize history for accuracy
plt.plot(history.history['acc'])
plt.plot(history.history['val_acc'])
plt.title('model accuracy')
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.legend(['train', 'test'], loc='upper left')
# axes = plt.gca()
# axes.set_xlim([0,120])
# axes.set_ylim([90,100])
#plt.savefig('acc.png')  # save the figure to file
plt.show()
#plt.close()
'''


# summarize history for loss
plt.plot(history.history['loss'])
plt.plot(history.history['val_loss'])
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train', 'test'], loc='upper left')
#plt.savefig('loss.png')

# cap the y-axis at 1.5x the best training loss so the flat tail stays readable
plt.ylim([0, 1.5 * min(history.history['loss'])])
plt.show()

#plt.close()



In [30]:
from matplotlib.backends.backend_pdf import PdfPages
with PdfPages('multipage_pdf.pdf') as pdf:
    for i in range(0,10):
        # summarize history for loss (writes the same loss figure to 10 pdf pages, as a demo)
        plt.figure()
        plt.plot(history.history['loss'])
        plt.plot(history.history['val_loss'])
        plt.title('model loss')
        plt.ylabel('loss')
        plt.xlabel('epoch')
        plt.legend(['train', 'test'], loc='upper left')

        plt.ylim([0, 2])
        #plt.show()
        pdf.savefig()  # saves the current figure into a pdf page
        plt.close()


---------------------------------------------------------------------------
NameError                                 Traceback (most recent call last)
<ipython-input-30-4788bc045bd5> in <module>()
      4     # summarize history for loss
      5         plt.figure()
----> 6         plt.plot(history.history['loss'])
      7         plt.plot(history.history['val_loss'])
      8         plt.title('model loss')

NameError: name 'history' is not defined
<matplotlib.figure.Figure at 0x7f6c3b2c44e0>

In [135]:
# summarize history for loss
plt.plot(history.history['loss'])
plt.plot(history.history['val_loss'])
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train', 'test'], loc='upper left')

plt.ylim([0, 2])
plt.show()



In [32]:
filePath="myfile.txt"

In [33]:
def writeToFile(history, filePath, columns):
    '''Append one line per run: columns used, loss/val_loss elbow epochs,
    total epochs, and final loss/val_loss.'''
    fh = open(filePath, 'a')
    loss = history.history['loss']
    loss_diff = np.diff(loss)
    val_loss = history.history['val_loss']
    val_loss_diff = np.diff(val_loss)

    # elbow = first epoch whose improvement over the previous one is below 0.01
    # (min() raises ValueError if every epoch still improves by more than 0.01)
    loss_Elbow = min(x for x in range(len(loss_diff)) if loss_diff[x] > -0.01) + 1
    val_loss_Elbow = min(x for x in range(len(val_loss_diff)) if val_loss_diff[x] > -0.01) + 1

    print(','.join(columns), "\tlE", loss_Elbow, "\tvlE", val_loss_Elbow, "\ttepo", len(loss), end='', file=fh)
    print("\tloss\t{0:0.5f}".format(loss[-1]), "\tval_loss\t{0:0.5f}".format(val_loss[-1]), file=fh)
    fh.close()
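
In [ ]:
# Equivalent elbow computation as a sketch with np.argmax, which returns the index of
# the first True. Caveat: it returns 0 when no epoch clears the cutoff, whereas the
# min() over an empty generator above raises ValueError.
#   loss_Elbow = int(np.argmax(np.diff(loss) > -0.01)) + 1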

In [34]:
writeToFile(history,filePath,columns=['1','2'])


---------------------------------------------------------------------------
NameError                                 Traceback (most recent call last)
<ipython-input-34-04bf8de6f0a9> in <module>()
----> 1 writeToFile(history,filePath,columns=['1','2'])

NameError: name 'history' is not defined

In [35]:
#print("loss\t{0:0.5f}".format(history.history['loss'][-1]),"\tval_loss\t{0:0.5f}".format(history.history['val_loss'][-1]))

#print("%0.3f \tval_loss\t %0.5f " % history.history['loss'][-1],history.history['val_loss'][-1])

In [141]:
# feature groups for the two input branches
# (a loop over candidate groups is sketched after the training output below)
gr4 = ["full_sq", "life_sq", "floor", "max_floor", "material",
       "build_year", "num_room", "kitch_sq", "state", "radiation_km"]
gr3 = ["green_zone_km", "railroad_km", "public_transport_station_km", "metro_min_avto"]

gr1 = ["num_room", "life_sq", "metro_min_walk", "nuclear_reactor_km", "ttk_km", "zd_vokzaly_avto_km", "sadovoe_km", "bulvar_ring_km", "kremlin_km", "stadium_km"]
gr2 = ["basketball_km", "ID_railroad_station_walk", "build_count_slag", "build_count_panel", "prom_part_3000", "build_count_frame",
       "build_count_before_1920", "indust_part", "raion_build_count_with_material_info",
       "build_count_1971-1995"]

gr1 = gr1 + gr2 + gr3 + gr4  # branch 1: union of all four lists
gr2 = ["life_sq", "big_church_km", "metro_km_walk", "public_healthcare_km", "kremlin_km",
       "fitness_km", "market_shop_km", "basketball_km", "zd_vokzaly_avto_km",
       "green_part_1500"]  # branch 2: redefined as its own list

clist = gr1 + gr2
print("--------->",clist)
print("=> loading data:")
[df_train,df_test,df_macro]=load_data()
print("=> drop_crazy_data_point:")
df_train=drop_crazy_data_point(clist,df_train)
print("=> prepare data:")
[y_train,id_test,num_train,df_all]=take_ytrain_testid_traincnt(df_train,df_test)
print('=> handle factors')
[X_train1,X_train2,X_test1,X_test2]=factor(df_all,num_train,gr1,gr2)
print('=> model arch')
[model1,model2,model]=baseline_model(X_train1.shape[1],X_train2.shape[1])
print('=> train')
[model,history]=trainKeras(model,X_train1,X_train2,y_train)
print('=>print to file')
writeToFile(history,filePath="try1_concat.txt",columns=clist)


---------> ['num_room', 'life_sq', 'metro_min_walk', 'nuclear_reactor_km', 'ttk_km', 'zd_vokzaly_avto_km', 'sadovoe_km', 'bulvar_ring_km', 'kremlin_km', 'stadium_km', 'basketball_km', 'ID_railroad_station_walk', 'build_count_slag', 'build_count_panel', 'prom_part_3000', 'build_count_frame', 'build_count_before_1920', 'indust_part', 'raion_build_count_with_material_info', 'build_count_1971-1995', 'green_zone_km', 'railroad_km', 'public_transport_station_km', 'metro_min_avto', 'full_sq', 'life_sq', 'floor', 'max_floor', 'material', 'build_year', 'num_room', 'kitch_sq', 'state', 'radiation_km', 'life_sq', 'big_church_km', 'metro_km_walk', 'public_healthcare_km', 'kremlin_km', 'fitness_km', 'market_shop_km', 'basketball_km', 'zd_vokzaly_avto_km', 'green_part_1500']
=> loading data:
=> drop_crazy_data_point:
shape after drop_crazy_data_point:
(21836, 292)
=> prepare data:
(29498, 390)
=> handle factors
(29498, 392)
=> model arch
=> train
^^^INFO: Fit Model^^^
/home/minesh/anaconda3/lib/python3.6/site-packages/ipykernel/__main__.py:25: UserWarning: The `Merge` layer is deprecated and will be removed after 08/2017. Use instead layers from `keras.layers.merge`, e.g. `add`, `concatenate`, etc.
Train on 15285 samples, validate on 6551 samples
Epoch 1/400
0s - loss: 94.6556 - val_loss: 79.4855
Epoch 2/400
0s - loss: 67.3800 - val_loss: 61.2795
Epoch 3/400
0s - loss: 52.4742 - val_loss: 48.3097
[... epochs 4-398 elided: loss falls steadily and flattens around 0.255 from roughly epoch 300 on; best val_loss 0.2200 at epoch 294 ...]
Epoch 399/400
0s - loss: 0.2546 - val_loss: 0.2308
Epoch 400/400
0s - loss: 0.2546 - val_loss: 0.2295
=>print to file
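
In [ ]:
# The "looping for features" idea from the driver cell above, as a sketch: run the
# pipeline once per candidate group and log each run via writeToFile. feature_groups
# is a hypothetical list, and here both branches get the same group, unlike the run above.
feature_groups = [gr1, gr2, gr3, gr4]
for grp in feature_groups:
    [df_train, df_test, df_macro] = load_data()
    df_train = drop_crazy_data_point(grp, df_train)
    [y_train, id_test, num_train, df_all] = take_ytrain_testid_traincnt(df_train, df_test)
    [X_train1, X_train2, X_test1, X_test2] = factor(df_all, num_train, grp, grp)
    [model1, model2, model] = baseline_model(X_train1.shape[1], X_train2.shape[1])
    [model, history] = trainKeras(model, X_train1, X_train2, y_train)
    writeToFile(history, filePath="try1_concat.txt", columns=grp)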

In [48]:
X_train


Out[48]:
array([[ 43.        ,  27.        ,   4.        , ...,   1.30515949,
          0.27498514,   2.5902411 ],
       [ 34.        ,  19.        ,   3.        , ...,   0.69453573,
          0.06526334,   0.93669973],
       [ 43.        ,  29.        ,   2.        , ...,   0.70069112,
          0.32875604,   2.1209989 ],
       ..., 
       [ 45.        ,  32.99267568,  10.        , ...,   1.72722333,
          0.63001355,   2.15279162],
       [ 64.        ,  32.        ,   5.        , ...,   4.89804719,
          0.26152751,   3.37781368],
       [ 43.        ,  28.        ,   1.        , ...,   0.7349489 ,
          0.25015115,   0.58463601]])

In [34]:
#X_test = X_test.reshape(X_test.shape[0],244,1)

In [82]:
y_test=model.predict([X_test1,X_test2])

In [83]:
y_test[:,0].shape


Out[83]:
(7662,)

In [84]:
df_sub = pd.DataFrame({'id': id_test, 'price_doc': y_test[:,0]})

df_sub.to_csv('sub 2017-04-30_rmsle_para_gr1te_gr2tr_export.csv', index=False) 

# previous submission: LB 0.40908 after 120/120 epochs
# 0s - loss: 0.2793 - val_loss: 0.2215
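
In [ ]:
# Quick sanity check on the submission frame before upload (sketch): row count,
# missing values, and non-negative prices (the relu output should guarantee the latter).
print(df_sub.shape)
print(df_sub['price_doc'].isnull().sum(), df_sub['price_doc'].min())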

In [ ]:
# submitted: real data, 12 features, std>3 rows eliminated -> LB 0.40566