In [1]:
#version vx3

## new to this version
# optimize lr

## already in code
# 11 selected features
# train entries with column z-score (STD) > 3 removed
# keras.layers.merge.Concatenate(axis=-1)

## already tried
#

## other options
# optimizers, batch size,
# dropout layer, batchnorm, number of dense layers / neurons per layer

## future
# remove columns with < 0.2 variance (see the sketch below)
# try individual features, then try combinations
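
# A minimal sketch of the "remove columns with < 0.2 variance" idea above; the helper
# name and the 0.2 cutoff are illustrative assumptions, not code used elsewhere in this
# notebook.
def drop_low_variance_columns(df, threshold=0.2):
    '''Return df without the numeric columns whose variance is below threshold.'''
    numeric = df.select_dtypes(exclude=['object'])
    low_var_cols = numeric.var()[numeric.var() < threshold].index.tolist()
    return df.drop(low_var_cols, axis=1)
# e.g. df_train = drop_low_variance_columns(df_train, threshold=0.2)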

In [2]:
#0.40094 <- 
#0.39138 <- # real,elim std>3, # 12-300-30-10-1 # test_vx1 #epoch 60/200 0s - loss: 0.3677 - val_loss: 0.2493
#0.40566 <- # real,elim std>3, # 12-300-30-10-1 # test_v1_ext #epoch 72/200 0s - loss: 0.5543 - val_loss: 0.2496
#0.48500 <- elim std>3, 12-300-30-10-1
#0.52512 <- Epoch 11/200  16s - loss: 0.4993 - val_loss: 0.3509
#0.38798 <- 12-300-30-5-1
#0.41785<- 9col grp1 loss: 0.3123 - acc: 0.0000e+00 - val_loss: 0.2602
#0.43247 <-3col 
#0.53085 <- adadelta
#0.50647 <-decay
#0.48978 <-rmsle

#tasks
# feature extraction
# feature selection
# optimizer selection, lr, decay (see the sweep sketch below)
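
# A possible sketch for the "optimizer selection, lr, decay" task: rebuild the model for
# each candidate setting (so weights start fresh), recompile RMSprop, and record the final
# validation loss. build_fn and the training arrays are assumptions here -- baseline_model
# and X_train1/X_train2/y_train are only defined later in this notebook -- and the candidate
# lr/decay values are illustrative.
def sweep_rmsprop(build_fn, X1, X2, y, lrs=(1e-3, 1e-4, 1e-5), decays=(0.0, 1e-4), epochs=50):
    from keras.optimizers import RMSprop
    results = {}
    for lr in lrs:
        for decay in decays:
            _, _, m = build_fn(X1.shape[1], X2.shape[1])  # fresh weights for every run
            m.compile(loss='mean_squared_logarithmic_error',
                      optimizer=RMSprop(lr=lr, rho=0.9, epsilon=1e-08, decay=decay))
            h = m.fit([X1, X2], y, epochs=epochs, batch_size=780,
                      validation_split=0.3, verbose=0)
            results[(lr, decay)] = h.history['val_loss'][-1]
    return results
# e.g. sweep_rmsprop(baseline_model, X_train1, X_train2, y_train)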

In [3]:
### importing libraries
%matplotlib inline
from keras.layers.convolutional import Convolution2D, MaxPooling2D, Convolution1D
from keras.layers.core import Dense, Activation, Dropout, Flatten
from keras.models import Sequential
from keras.optimizers import SGD,RMSprop
from keras.datasets import mnist
from keras.layers import Concatenate,Merge
from keras.layers.normalization import BatchNormalization
from keras.callbacks import EarlyStopping

import numpy as np
import pandas as pd
from keras.wrappers.scikit_learn import KerasRegressor
from sklearn.model_selection import cross_val_score
from sklearn.model_selection import KFold
from sklearn.preprocessing import StandardScaler
from sklearn.pipeline import Pipeline
from sklearn import preprocessing

import matplotlib as mpl

#mpl.use('Agg')
import matplotlib.pyplot as plt
import time
timestr = time.strftime("%Y%m%d-%H%M%S")
print(timestr)


Using TensorFlow backend.
20170505-211827

In [4]:
def load_data():
    df_train = pd.read_csv("../input/train.csv", parse_dates=['timestamp'])
    df_test = pd.read_csv("../input/test.csv", parse_dates=['timestamp'])
    df_macro = pd.read_csv("../input/macro.csv", parse_dates=['timestamp'])
    return [df_train,df_test,df_macro]
#df_train.head()

In [5]:
#print( df_train.shape)

In [6]:
#selected features

#gr1=["full_sq", "life_sq", "floor", "max_floor", "material", 
#"build_year","num_room", "kitch_sq","state","radiation_km",
#"green_zone_km","railroad_km", "public_transport_station_km","metro_min_avto" ] 
#"kindergarden_km" 
#"sub_area" = object, str

#gr1=["full_sq"] #elbo 10 epo  26/200 0s - loss: 0.3472 - val_loss: 0.2767
#gr1=["life_sq"] #     20 epo 163/200 0s - loss: 0.4144 - val_loss: 0.3150
#gr1=["floor"]   #     55 epo  83/200 0s - loss: 0.4261 - val_loss: 0.3554


#gr1 = list(set(gr1))
#print(len(gr1))
#for c in gr1:
#    print (c+"\t"+str(df_train[c].dtype))


1
---------------------------------------------------------------------------
NameError                                 Traceback (most recent call last)
<ipython-input-6-9711c9ffc3dd> in <module>()
     15 print(len(gr1))
     16 for c in gr1:
---> 17     print (c+"\t"+str(df_train[c].dtype))

NameError: name 'df_train' is not defined

In [8]:
# This section added: drop outlier rows (column z-score > 3)
def drop_crazy_data_point(clist,df_train):
    '''Drop rows whose value in any listed column lies more than 3 standard
    deviations from that column's mean (|z-score| > 3).
    clist    = list of column names to check
    df_train = pandas DataFrame (modified in place and returned)
    '''

    for cname in clist:
        # z-score the column and drop rows beyond 3 standard deviations
        df_train.drop(df_train[ np.abs((df_train[cname]-df_train[cname].mean())/df_train[cname].std())>3].index, inplace=True)
    print('shape after drop_crazy_data_point:')
    print( df_train.shape)
    return df_train

In [9]:
def take_ytrain_testid_traincnt(df_train,df_test):
    y_train = df_train['price_doc'].values
    id_test = df_test['id']

    df_train.drop(['id', 'price_doc'], axis=1, inplace=True)
    df_test.drop(['id'], axis=1, inplace=True)

    num_train = len(df_train)
    df_all = pd.concat([df_train, df_test])
    # NOTE: df_macro is read from the enclosing (global) scope, not passed as an argument.
    # NOTE: join(..., on='timestamp') matches the timestamp column against df_macro's
    # default integer index, so the macro columns may not align as intended; merging on
    # df_macro's own 'timestamp' column is the usual alternative.
    df_all = df_all.join(df_macro, on='timestamp', rsuffix='_macro')
    print(df_all.shape)
    

    # Add month-year
    month_year = (df_all.timestamp.dt.month + df_all.timestamp.dt.year * 100)
    month_year_cnt_map = month_year.value_counts().to_dict()
    df_all['month_year_cnt'] = month_year.map(month_year_cnt_map)

    # Add week-year count
    week_year = (df_all.timestamp.dt.weekofyear + df_all.timestamp.dt.year * 100)
    week_year_cnt_map = week_year.value_counts().to_dict()
    df_all['week_year_cnt'] = week_year.map(week_year_cnt_map)

    # Add month and day-of-week
    df_all['month'] = df_all.timestamp.dt.month
    df_all['dow'] = df_all.timestamp.dt.dayofweek

    # Other feature engineering
    #df_all['rel_floor'] = df_all['floor'] / df_all['max_floor'].astype(float)
    #df_all['rel_kitch_sq'] = df_all['kitch_sq'] / df_all['full_sq'].astype(float)

    # Remove timestamp column (may overfit the model in train)
    df_all.drop(['timestamp', 'timestamp_macro'], axis=1, inplace=True)

    return [y_train,id_test,num_train,df_all]
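
# The month_year_cnt and week_year_cnt columns above are frequency encodings: each row
# receives the number of rows that share its month-year (or week-year) value.
# Minimal illustration with made-up values (not competition data):
example = pd.Series([201101, 201101, 201102])
print(example.map(example.value_counts().to_dict()))  # -> 2, 2, 1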

In [10]:
def factor(df_all,num_train,gr1,gr2):
    factorize = lambda t: pd.factorize(t[1])[0]

    df_obj = df_all.select_dtypes(include=['object'])

    X_all = np.c_[
        df_all.select_dtypes(exclude=['object']).values,
        np.array(list(map(factorize, df_obj.iteritems()))).T
    ]
    print(X_all.shape)

    X_train = X_all[:num_train]
    X_test = X_all[num_train:]

    # Deal with categorical values
    df_numeric = df_all.select_dtypes(exclude=['object'])
    df_obj = df_all.select_dtypes(include=['object']).copy()

    for c in df_obj:
        df_obj[c] = pd.factorize(df_obj[c])[0]

    df_values = pd.concat([df_numeric, df_obj], axis=1)

    df_values=df_values.fillna(df_values.mean())
    df_values=df_values.dropna(axis="columns", how='all')

    #df_values.drop(['area_m','ID_metro'], axis=1, inplace=True)

    
    df_values1=df_values[gr1]
    df_values2=df_values[gr2]
    
    #df_values.mean()
    #df_values.shape
    
    # Convert to numpy values
    X_all1 = df_values1.values
    X_all2 = df_values2.values
    #print(X_all1.shape)

    X_train1 = X_all1[:num_train]
    X_train2 = X_all2[:num_train]
    
    X_test1 = X_all1[num_train:]
    X_test2 = X_all2[num_train:]

    #df_columns = df_values.columns.tolist
    #df_columns

    return [X_train1,X_train2,X_test1,X_test2]
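
# pd.factorize, used above for the object (string) columns, replaces each distinct value
# with an integer code. Minimal illustration with made-up values:
codes, uniques = pd.factorize(['brick', 'panel', 'brick'])
print(codes)    # -> [0 1 0]
print(uniques)  # -> ['brick' 'panel']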

In [11]:
#for c in df_values.columns:
#    if df_values[c].dtype == 'object':
#        lbl = preprocessing.LabelEncoder()
#        lbl.fit(list(df_values[c].values)) 
#        df_values[c] = lbl.transform(list(df_values[c].values))

In [12]:
#c = df_values.columns[4]
#df_values[c]
#c

In [13]:
#X_all.tofile("x_all.csv",format="%s",sep=",")

In [14]:
#df_valuesclean=df_values.dropna(axis="columns", how='any')
#df_valuesclean.shape

In [15]:
from keras.layers.merge import Concatenate
# define base model
def baseline_model(df_values1_cols,df_values2_cols):
    
    model1 = Sequential()
    model1.add(Dense(30, input_dim=df_values1_cols, activation='relu'))
    #model1.add(BatchNormalization())
    #model1.add(Dropout(0.5))
    #model1.add(Dense(60,activation="relu"))
    #model1.add(Dropout(0.25))
    model1.add(Dense(15,activation="relu"))
    #model1.add(Dropout(0.25))
    
    model2 = Sequential()
    model2.add(Dense(30, input_dim=df_values2_cols, activation='relu'))
    #model1.add(BatchNormalization())
    #model2.add(Dropout(0.5))
    #model2.add(Dense(60,activation="relu"))
    #model2.add(Dropout(0.5))
    model2.add(Dense(15,activation="relu"))
    #model2.add(Dropout(0.25))
    
    model = Sequential()
    #model.add(Concatenate([model1, model2],input_shape=(120,1)))
    model.add(Merge([model1, model2], mode='concat'))
    model.add(Dense(100,activation="relu"))
    #model.add(Dropout(0.25))
    model.add(Dense(30,activation="relu"))
    # NOTE: a relu activation on the single output unit can get pinned at 0 for
    # regression targets; a 'linear' output activation is the more common choice.
    model.add(Dense(1,activation="relu"))
    # Compile model
    #sgd=SGD(lr=0.01, momentum=0.0, decay=0.0, nesterov=False)
    
    
    
    # RMSprop optimizer (from the Keras docs):
    #   It is recommended to leave the parameters of this optimizer at their default
    #   values (except the learning rate, which can be freely tuned).
    #   This optimizer is usually a good choice for recurrent neural networks.
    #   Arguments:
    #     lr: float >= 0. Learning rate.
    #     rho: float >= 0.
    #     epsilon: float >= 0. Fuzz factor.
    #     decay: float >= 0. Learning rate decay over each update.
    
    #default optimizer
    #model.compile(loss='mean_squared_logarithmic_error', \
    #              optimizer=RMSprop(lr=0.001, rho=0.9, epsilon=1e-08, decay=0.0))
    
    #lowering lr
    model.compile(loss='mean_squared_logarithmic_error', \
                  optimizer=RMSprop(lr=0.00001, rho=0.9, epsilon=1e-08, decay=0.0))
    
    #my model
    #model.compile(loss='mean_squared_logarithmic_error', optimizer=RMSprop(decay=0.0001))
        #Adadelta
        #sgd
    return [model1,model2,model]

In [35]:
[df_train,df_test,df_macro]=load_data()

In [36]:
[model1,model2,model]=baseline_model(15,15)
model1.summary()
model2.summary()
model.summary()


_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_15 (Dense)             (None, 30)                480       
_________________________________________________________________
dense_16 (Dense)             (None, 15)                465       
=================================================================
Total params: 945
Trainable params: 945
Non-trainable params: 0
_________________________________________________________________
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
dense_17 (Dense)             (None, 30)                480       
_________________________________________________________________
dense_18 (Dense)             (None, 15)                465       
=================================================================
Total params: 945
Trainable params: 945
Non-trainable params: 0
_________________________________________________________________
_________________________________________________________________
Layer (type)                 Output Shape              Param #   
=================================================================
merge_3 (Merge)              (None, 30)                0         
_________________________________________________________________
dense_19 (Dense)             (None, 100)               3100      
_________________________________________________________________
dense_20 (Dense)             (None, 30)                3030      
_________________________________________________________________
dense_21 (Dense)             (None, 1)                 31        
=================================================================
Total params: 8,051
Trainable params: 8,051
Non-trainable params: 0
_________________________________________________________________
/home/minesh/anaconda3/lib/python3.6/site-packages/ipykernel/__main__.py:25: UserWarning: The `Merge` layer is deprecated and will be removed after 08/2017. Use instead layers from `keras.layers.merge`, e.g. `add`, `concatenate`, etc.

In [24]:
def trainKeras(model,X_train1,X_train2,y_train,epo):
#model.compile(loss='mean_squared_error', optimizer='sgd', metrics=['accuracy'])
    print ("^^^INFO: Fit Model^^^")
#X_train = X_train.reshape(X_train.shape[0],244,1)

    # EarlyStopping is defined here but not currently used: the callbacks argument
    # is left commented out on the fit() call below.
    callbacks = [
        EarlyStopping(monitor='val_loss', patience=50, verbose=2)
    ]

    history = model.fit(x=[X_train1,X_train2], y=y_train, epochs=epo, batch_size=780,
                        validation_split=0.3, verbose=2)  # ,callbacks=callbacks
    return [model,history]

In [19]:
# list all data in history
'''
print(history.history.keys())
# summarize history for accuracy
plt.plot(history.history['acc'])
plt.plot(history.history['val_acc'])
plt.title('model accuracy')
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.legend(['train', 'test'], loc='upper left')
# axes = plt.gca()
# axes.set_xlim([0,120])
# axes.set_ylim([90,100])
#plt.savefig('acc.png')  # save the figure to file
plt.show()
#plt.close()
'''


# summarize history for loss
plt.plot(history.history['loss'])
plt.plot(history.history['val_loss'])
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train', 'test'], loc='upper left')
#plt.savefig('loss.png')
#plt.show()

plt.ylim([0, \
          min(history.history['loss'])+min(history.history['loss'])/2])
plt.show()

#plt.fig()
#plt.close()



In [30]:
from matplotlib.backends.backend_pdf import PdfPages
with PdfPages('multipage_pdf.pdf') as pdf:
    for i in range(0,10):
    # summarize history for loss
        plt.figure()
        plt.plot(history.history['loss'])
        plt.plot(history.history['val_loss'])
        plt.title('model loss')
        plt.ylabel('loss')
        plt.xlabel('epoch')
        plt.legend(['train', 'test'], loc='upper left')

        plt.ylim([0, 2])
        #plt.show()
        pdf.savefig()  # saves the current figure into a pdf page
        plt.close()


---------------------------------------------------------------------------
NameError                                 Traceback (most recent call last)
<ipython-input-30-4788bc045bd5> in <module>()
      4     # summarize history for loss
      5         plt.figure()
----> 6         plt.plot(history.history['loss'])
      7         plt.plot(history.history['val_loss'])
      8         plt.title('model loss')

NameError: name 'history' is not defined
<matplotlib.figure.Figure at 0x7f6c3b2c44e0>

In [20]:
# summarize history for loss
plt.plot(history.history['loss'])
plt.plot(history.history['val_loss'])
plt.title('model loss')
plt.ylabel('loss')
plt.xlabel('epoch')
plt.legend(['train', 'test'], loc='upper left')

plt.ylim([0, 2])
plt.show()



In [32]:
filePath="myfile.txt"

In [17]:
def writeToFile(history,filePath,columns):
    '''Append a one-line summary of a training run (elbow epochs and final losses) to filePath.'''
    fh=open(filePath,'a')
    loss=history.history['loss']
    loss_diff=np.diff(loss)
    val_loss=history.history['val_loss']
    val_loss_diff=np.diff(val_loss)

    # "elbow" = first epoch (1-based) at which the per-epoch improvement shrinks below 0.01
    loss_Elbow=min(x for x in range(len(loss_diff)) if loss_diff[x] > -0.01)+1
    val_loss_Elbow=min(x for x in range(len(val_loss_diff)) if val_loss_diff[x] > -0.01)+1

    print(','.join(columns),"\tlE",loss_Elbow,"\tvlE",val_loss_Elbow,"\ttepo",len(loss),end='',file=fh)
    print("\tloss\t{0:0.5f}".format(loss[-1]),"\tval_loss\t{0:0.5f}".format(val_loss[-1]),file=fh)
    fh.close()

In [34]:
writeToFile(history,filePath,columns=['1','2'])


---------------------------------------------------------------------------
NameError                                 Traceback (most recent call last)
<ipython-input-34-04bf8de6f0a9> in <module>()
----> 1 writeToFile(history,filePath,columns=['1','2'])

NameError: name 'history' is not defined

In [35]:
#print("loss\t{0:0.5f}".format(history.history['loss'][-1]),"\tval_loss\t{0:0.5f}".format(history.history['val_loss'][-1]))

#print("%0.3f \tval_loss\t %0.5f " % history.history['loss'][-1],history.history['val_loss'][-1])

In [ ]:
#looping for features
gr4=["full_sq", "life_sq", "floor", "max_floor", "material", 
"build_year","num_room", "kitch_sq","state","radiation_km"]
gr3 =["green_zone_km","railroad_km", "public_transport_station_km","metro_min_avto" ] 

gr1=["num_room", "life_sq", "metro_min_walk", "nuclear_reactor_km", "ttk_km", "zd_vokzaly_avto_km", "sadovoe_km", "bulvar_ring_km", "kremlin_km", "stadium_km"]
gr2=["basketball_km", "ID_railroad_station_walk", "build_count_slag", "build_count_panel", "prom_part_3000", "build_count_frame",
     "build_count_before_1920", "indust_part", "raion_build_count_with_material_info",
     "build_count_1971-1995"]

gr1=gr1+gr2+gr3+gr4
gr2=["life_sq", "big_church_km", "metro_km_walk", "public_healthcare_km", "kremlin_km",
     "fitness_km", "market_shop_km", "basketball_km", "zd_vokzaly_avto_km", 
     "green_part_1500"]

clist=gr1+gr2
print("--------->",clist)
print("=> loading data:")
[df_train,df_test,df_macro]=load_data()
print("=> drop_crazy_data_point:")
df_train=drop_crazy_data_point(clist,df_train)
print("=> prepare data:")
[y_train,id_test,num_train,df_all]=take_ytrain_testid_traincnt(df_train,df_test)
print('=> handle factors')
[X_train1,X_train2,X_test1,X_test2]=factor(df_all,num_train,gr1,gr2)
print('=> model arch')
[model1,model2,model]=baseline_model(X_train1.shape[1],X_train2.shape[1])
print('=> train')
[model,history]=trainKeras(model,X_train1,X_train2,y_train,epo=7000)
print('=>print to file')
writeToFile(history,filePath="try1_concat_lr.txt",columns=clist)


---------> ['num_room', 'life_sq', 'metro_min_walk', 'nuclear_reactor_km', 'ttk_km', 'zd_vokzaly_avto_km', 'sadovoe_km', 'bulvar_ring_km', 'kremlin_km', 'stadium_km', 'basketball_km', 'ID_railroad_station_walk', 'build_count_slag', 'build_count_panel', 'prom_part_3000', 'build_count_frame', 'build_count_before_1920', 'indust_part', 'raion_build_count_with_material_info', 'build_count_1971-1995', 'green_zone_km', 'railroad_km', 'public_transport_station_km', 'metro_min_avto', 'full_sq', 'life_sq', 'floor', 'max_floor', 'material', 'build_year', 'num_room', 'kitch_sq', 'state', 'radiation_km', 'life_sq', 'big_church_km', 'metro_km_walk', 'public_healthcare_km', 'kremlin_km', 'fitness_km', 'market_shop_km', 'basketball_km', 'zd_vokzaly_avto_km', 'green_part_1500']
=> loading data:
=> drop_crazy_data_point:
shape after drop_crazy_data_point:
(21836, 292)
=> prepare data:
(29498, 390)
=> handle factors
(29498, 392)
=> model arch
=> train
^^^INFO: Fit Model^^^
/home/minesh/anaconda3/lib/python3.6/site-packages/ipykernel/__main__.py:25: UserWarning: The `Merge` layer is deprecated and will be removed after 08/2017. Use instead layers from `keras.layers.merge`, e.g. `add`, `concatenate`, etc.
Train on 15285 samples, validate on 6551 samples
Epoch 1/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 2/7000
0s - loss: 243.4148 - val_loss: 248.3149
[... remaining epoch output omitted: loss and val_loss stayed fixed at 243.4148 / 248.3149 for every epoch shown, up to Epoch 759/7000, where the captured output ends ...]
Epoch 760/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 761/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 762/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 763/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 764/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 765/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 766/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 767/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 768/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 769/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 770/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 771/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 772/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 773/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 774/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 775/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 776/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 777/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 778/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 779/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 780/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 781/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 782/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 783/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 784/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 785/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 786/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 787/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 788/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 789/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 790/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 791/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 792/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 793/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 794/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 795/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 796/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 797/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 798/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 799/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 800/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 801/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 802/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 803/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 804/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 805/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 806/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 807/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 808/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 809/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 810/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 811/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 812/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 813/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 814/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 815/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 816/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 817/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 818/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 819/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 820/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 821/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 822/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 823/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 824/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 825/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 826/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 827/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 828/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 829/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 830/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 831/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 832/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 833/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 834/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 835/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 836/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 837/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 838/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 839/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 840/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 841/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 842/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 843/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 844/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 845/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 846/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 847/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 848/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 849/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 850/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 851/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 852/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 853/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 854/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 855/7000
0s - loss: 243.4148 - val_loss: 248.3149
Epoch 856/7000
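With the loss frozen for hundreds of epochs, most of the 7000-epoch budget is wasted. A minimal sketch of cutting such a run short with the EarlyStopping callback already imported above (the patience value is an illustrative assumption, not a setting from this run):

# Stop once val_loss has failed to improve for `patience` consecutive epochs.
from keras.callbacks import EarlyStopping

early_stop = EarlyStopping(monitor='val_loss', patience=20, verbose=1)

# Hypothetical usage with the same two-branch inputs used for prediction below:
# model.fit([X_train1, X_train2], y_train, epochs=7000,
#           validation_split=0.2, callbacks=[early_stop], verbose=2)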

In [48]:
X_train


Out[48]:
array([[ 43.        ,  27.        ,   4.        , ...,   1.30515949,
          0.27498514,   2.5902411 ],
       [ 34.        ,  19.        ,   3.        , ...,   0.69453573,
          0.06526334,   0.93669973],
       [ 43.        ,  29.        ,   2.        , ...,   0.70069112,
          0.32875604,   2.1209989 ],
       ..., 
       [ 45.        ,  32.99267568,  10.        , ...,   1.72722333,
          0.63001355,   2.15279162],
       [ 64.        ,  32.        ,   5.        , ...,   4.89804719,
          0.26152751,   3.37781368],
       [ 43.        ,  28.        ,   1.        , ...,   0.7349489 ,
          0.25015115,   0.58463601]])
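The raw feature columns in X_train sit on very different scales (square metres, floor counts, and kilometre distances side by side), which tends to make gradient training unstable. A minimal sketch of standardising them with the StandardScaler already imported above, assuming X_test holds the same columns:

from sklearn.preprocessing import StandardScaler

scaler = StandardScaler()
X_train_scaled = scaler.fit_transform(X_train)  # fit the scaler on the training features only
X_test_scaled = scaler.transform(X_test)        # apply the same mean/std to the test features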

In [34]:
#X_test = X_test.reshape(X_test.shape[0],244,1)

In [21]:
y_test = model.predict([X_test1, X_test2])  # predicted price_doc for each test row from the two-input model

In [22]:
y_test[:,0].shape


Out[22]:
(7662,)
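Since price_doc is a price and the leaderboard metric is RMSLE, a negative or zero prediction would break the log term. A small sketch of guarding against that before building the submission frame in the next cell (the floor of 1 is an illustrative assumption):

import numpy as np

# clip predictions to a strictly positive floor so the RMSLE log stays defined
y_test = np.clip(y_test, 1, None)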

In [23]:
# build the Kaggle submission: one predicted price_doc per test id
df_sub = pd.DataFrame({'id': id_test, 'price_doc': y_test[:, 0]})

df_sub.to_csv('sub 2017-04-30_rmsle_vx3_lr_4000epo.csv', index=False)

# 0.49433 <- Epoch 4000/4000  0s - loss: 0.3124 - val_loss: 0.3100
# 0.40908 <- Epoch 120/120    0s - loss: 0.2793 - val_loss: 0.2215

In [ ]:
# submit real, 12 features, elim std>3 -> 0.40566