Welcome to foosbot

Load our video file into memory


In [1]:
#!pip install keras
#!pip install numpy
#!pip install imageio
#!pip install matplotlib
#!pip install opencv-python

In [2]:
from __future__ import print_function


import video_file
import importlib
importlib.reload(video_file)  # Pick up edits to video_file without restarting the kernel
from video_file import *

import cv2
import sys
import os
import csv
import numpy as np
from random import randint
from random import shuffle

from PIL import Image
import imageio
import itertools as it

import tensorflow as tf
import keras
print("Keras version %s" % keras.__version__)
from keras.models import Sequential
from keras.layers import Dense, Dropout
from keras import backend as K

print("Tensorflow version %s" % tf.__version__)

import pprint
pp = pprint.PrettyPrinter(depth=6)


# Create the image transformer
transformer = VideoTransform(zoom_range=0.1,
                             rotation_range=20,
                             width_shift_range=0.1,
                             height_shift_range=0.1,
                             shear_range=0.1,
                             fill_mode='nearest',
                             vertical_flip=False,
                             horizontal_flip=False,
                             horizontal_flip_invert_indices=[],
                             horizontal_flip_reverse_indices=[0, 1, 2, 3, 4, 5],
                             data_format='channels_last')

# Path to the training data config, relative to the notebook's working directory.
data_path = ".\\..\\..\\TrainingData\\Processed\\AmateurDefender\\Result\\settings.tsv"

print("Opening training frames from config %s." % (data_path))
position_rel_indexes = [0, 3] # Predict the current rod positions and a future position (relative frame index 3)
frame_rel_indexes = [0] # Use only current frame as input
training = TrainingInput(transformer, data_path, position_rel_indexes, frame_rel_indexes, 0.2)
training.clear_memory()


Using TensorFlow backend.
Keras version 2.0.4
Tensorflow version 1.1.0
Opening training frames from config .\..\..\TrainingData\Processed\AmateurDefender\Result\settings.tsv.
Creating training chunk from .\..\..\TrainingData\Processed\AmateurDefender\Result\chunk0.avi
.\..\..\TrainingData\Processed\AmateurDefender\Result\chunk0.avi
added 15932 new frames for a total of 15932
Creating training chunk from .\..\..\TrainingData\Processed\AmateurDefender\Result\chunk1.avi
.\..\..\TrainingData\Processed\AmateurDefender\Result\chunk1.avi
added 22943 new frames for a total of 38875
Creating training chunk from .\..\..\TrainingData\Processed\AmateurDefender\Result\chunk2.avi
.\..\..\TrainingData\Processed\AmateurDefender\Result\chunk2.avi
added 11297 new frames for a total of 50172

In [3]:
# Define our training and validation iterators

image_height       = training.height
image_width        = training.width
image_depth        = training.depth
image_channels     = training.channels
output_size        = 3


def TrainGen():
    while True:
        #print("TrainGen restarting training input.")
        training.move_first_training_frame()
        (frames, output) = training.get_next_training_frame()
        while frames is not None:
            yield (frames, output)
            (frames, output) = training.get_next_training_frame()
            
def ValidateGen():
    while True:
        #print("Validation restarting training input.")
        training.move_first_validation_frame()
        (frames, output) = training.get_next_validation_frame()
        while frames is not None:
            yield (frames, output)
            (frames, output) = training.get_next_validation_frame()

# Generators for training the position
def TrainBatchGen(batch_size):
    gen = TrainGen()
    while True:
        # Build the next batch
        batch_frames = np.zeros(shape=(batch_size, image_depth, image_height, image_width, image_channels), dtype=np.float32)
        batch_outputs = np.zeros(shape=(batch_size, 3), dtype=np.float32)
        for i in range(batch_size):
            (frames, output) = next(gen)
            batch_frames[i,:,:,:,:] = frames
            batch_outputs[i,:] = output[0:3] # Train just the 3 current rod positions as outputs
            #batch_outputs[i,:] = output[3:6] - output[0:3] # Train the difference in the three rod positions as output
            #batch_outputs[i,:] = output
            
        
        #pp.pprint("Yielding batch")
        #pp.pprint(batch_outputs)
        yield (batch_frames, batch_outputs)
        #pp.pprint("Yielded batch")

def ValidateBatchGen(batch_size):
    gen = ValidateGen()
    while True:
        # Build the next batch
        batch_frames = np.zeros(shape=(batch_size, image_depth, image_height, image_width, image_channels), dtype=np.float32)
        batch_outputs = np.zeros(shape=(batch_size, 3), dtype=np.float32)
        for i in range(batch_size):
            (frames, output) = next(gen)
            batch_frames[i,:,:,:,:] = frames
            batch_outputs[i,:] = output[0:3] # Train just the 3 current rod positions as outputs
            #batch_outputs[i,:] = output[3:6] - output[0:3] # Train the difference in the three rod positions as output
            #batch_outputs[i,:] = output
        
        #pp.pprint("Yielding batch")
        #pp.pprint(batch_outputs)
        yield (batch_frames, batch_outputs)
        #pp.pprint("Yielded batch")
            
# Generators for training the difference in position
def TrainBatchGenDpos(batch_size):
    gen = TrainGen()
    while True:
        # Build the next batch
        batch_frames = np.zeros(shape=(batch_size, image_depth, image_height, image_width, image_channels), dtype=np.float32)
        batch_outputs = np.zeros(shape=(batch_size, 3), dtype=np.float32)
        for i in range(batch_size):
            (frames, output) = next(gen)
            batch_frames[i,:,:,:,:] = frames
            
            # Idea (not implemented here): only keep this dpos sample when some rod
            # moves by more than 0.1 between the current and future frame -- see the
            # is_valid_dpos sketch after these generators.
            
            #batch_outputs[i,:] = output[0:3] # Train just the 3 current rod positions as outputs
            batch_outputs[i,0] = output[3] - output[0] # Train the difference in the three rod positions as output
            batch_outputs[i,1] = output[4] - output[1]
            batch_outputs[i,2] = output[5] - output[2]
            #batch_outputs[i,:] = output
            
        
        #pp.pprint("Yielding batch")
        #pp.pprint(batch_outputs)
        yield (batch_frames, batch_outputs)
        #pp.pprint("Yielded batch")

def ValidateBatchGenDpos(batch_size):
    gen = ValidateGen()
    while True:
        # Build the next batch
        batch_frames = np.zeros(shape=(batch_size, image_depth, image_height, image_width, image_channels), dtype=np.float32)
        batch_outputs = np.zeros(shape=(batch_size, 3), dtype=np.float32)
        for i in range(batch_size):
            (frames, output) = next(gen)
            batch_frames[i,:,:,:,:] = frames
            #batch_outputs[i,:] = output[0:3] # Train just the 3 current rod positions as outputs
            batch_outputs[i,0] = output[3] - output[0] # Train the difference in the three rod positions as output
            batch_outputs[i,1] = output[4] - output[1]
            batch_outputs[i,2] = output[5] - output[2]
            #batch_outputs[i,:] = output
        
        #pp.pprint("Yielding batch")
        #pp.pprint(batch_outputs)
        yield (batch_frames, batch_outputs)
        #pp.pprint("Yielded batch")


# Helper function to plot our validation result
import matplotlib
import matplotlib.image as mpimg
import matplotlib.pyplot as plt
import cv2
import pandas as pd
%matplotlib inline


def plot_validate(model, frames, outputs_true, name):
    #(frames, outputs_true) = next(ValidateBatchGen(2000))
    #frames = np.squeeze(frames, axis=(1,))
    #validate_in, validate_out
    #frames = validate_in
    #outputs_true =validate_out
    outputs_predicted = model.predict(frames, batch_size=32, verbose=1)
    print("Predicted.")
    
    
    pp.pprint(outputs_true)
    pp.pprint(outputs_predicted)
    
    
    plt.figure(figsize=(8,30))
    count = len(frames)
    
    plt.subplot(611)
    plt.plot(range(count),outputs_true[0:count,0], range(count),outputs_predicted[0:count,0] )
    plt.ylabel("Rod 1: %s" % name)
    plt.title("First 200 output recordings")
    plt.grid(True)
    
    plt.subplot(612)
    plt.plot(range(count),outputs_true[0:count,1], range(count),outputs_predicted[0:count,1] )
    plt.ylabel("Rod 2: %s" % name)
    plt.title("First output recordings")
    plt.grid(True)
    
    plt.subplot(613)
    plt.plot(range(count),outputs_true[0:count,2], range(count),outputs_predicted[0:count,2] )
    plt.ylabel("Rod 3: %s" % name)
    plt.title("First output recordings")
    plt.grid(True)
    
    
    
    plt.figure(figsize=(8,30))
    plt.subplot(611)
    
    true, predicted = zip(*sorted(zip(outputs_true[0:count,0], outputs_predicted[0:count,0])))
    plt.plot(range(count),true, range(count),predicted )
    plt.ylabel("Rod 1: %s" % name)
    plt.title("First 200 output recordings")
    plt.grid(True)
    
    plt.subplot(612)
    true, predicted = zip(*sorted(zip(outputs_true[0:count,1], outputs_predicted[0:count,1])))
    plt.plot(range(count),true, range(count),predicted, marker='.', markersize = 2, linewidth =0.1, markerfacecolor='black')
    plt.ylabel("Rod 2: %s" % name)
    plt.grid(True)
    
    plt.subplot(613)
    true, predicted = zip(*sorted(zip(outputs_true[0:count,2], outputs_predicted[0:count,2])))
    plt.plot(range(count),true, range(count),predicted, marker='.', markersize = 2, linewidth =0.1, markerfacecolor='black')
    plt.ylabel("Rod 3: %s" % name)
    plt.grid(True)
    
    
    plt.show()

Input training frame


In [4]:
import matplotlib
import matplotlib.image as mpimg
import matplotlib.pyplot as plt
import cv2
import pandas as pd
%matplotlib inline

training.move_first_training_frame()

for k in range(10):
    (frame, position) = training.get_next_training_frame()
    data = np.zeros(shape=(np.shape(frame)[1], np.shape(frame)[2] * np.shape(frame)[0], 3), dtype=np.float32)
    for i in range(np.shape(frame)[0]):
        tmp = frame[i,:,:,:]
        data[:,i*np.shape(frame)[2]:(i+1)*np.shape(frame)[2],:] = tmp


    plt.imshow(data)
    plt.show()
    pp.pprint(position)

training.move_first_training_frame()

print("Shape of training input:")
pp.pprint(np.shape(frame))

print("Shape of training output:")
pp.pprint(np.shape(position))

print("Corresponding Positions:")
pd.DataFrame(position)
pp.pprint(position)


[0.4303030303030303,
 0.1680327868852459,
 0.22162162162162163,
 0.4303030303030303,
 0.1680327868852459,
 0.22162162162162163]
[0.4303030303030303,
 0.1680327868852459,
 0.22162162162162163,
 0.4303030303030303,
 0.1680327868852459,
 0.22702702702702704]
[0.4303030303030303,
 0.1680327868852459,
 0.22162162162162163,
 0.4303030303030303,
 0.1680327868852459,
 0.22702702702702704]
[0.4303030303030303,
 0.1680327868852459,
 0.22162162162162163,
 0.4303030303030303,
 0.1680327868852459,
 0.22702702702702704]
[0.4303030303030303,
 0.1680327868852459,
 0.22702702702702704,
 0.4303030303030303,
 0.1680327868852459,
 0.22702702702702704]
[0.4303030303030303,
 0.1680327868852459,
 0.22702702702702704,
 0.4303030303030303,
 0.1680327868852459,
 0.22702702702702704]
[0.4303030303030303,
 0.1680327868852459,
 0.22702702702702704,
 0.4303030303030303,
 0.1680327868852459,
 0.22702702702702704]
[0.4303030303030303,
 0.1680327868852459,
 0.22702702702702704,
 0.4303030303030303,
 0.1680327868852459,
 0.22702702702702704]
[0.4303030303030303,
 0.1680327868852459,
 0.22702702702702704,
 0.4303030303030303,
 0.1680327868852459,
 0.22162162162162163]
[0.4303030303030303,
 0.1680327868852459,
 0.22702702702702704,
 0.4303030303030303,
 0.1762295081967213,
 0.22162162162162163]
Shape of training input:
(1, 54, 100, 3)
Shape of training output:
(6,)
Corresponding Positions:
[0.4303030303030303,
 0.1680327868852459,
 0.22702702702702704,
 0.4303030303030303,
 0.1762295081967213,
 0.22162162162162163]

Specify the model structure we will use


In [5]:
from keras.models import Sequential
from keras.layers import *
from keras.models import Model


# Build the model
pp.pprint("Input shape without batches:")
pp.pprint((image_height, image_width, image_channels))


# Build a functional model design
inputs = Input(shape=(1, image_height, image_width, image_channels,))
x = Conv3D(124,
           kernel_size = (1, 5, 5),
           padding = "same")(inputs)
x = Activation('relu')(x)

x = Conv3D(124,
           kernel_size = (1, 5, 5),
           padding = "same")(x)
x = Activation('relu')(x)

# Split into horizontally detailed and vertically detailed CNN paths
x_height_detailed = MaxPooling3D( pool_size=(1, 2, 1))(x) # shape: (?, 1, 27, 100, 124)

x_height_detailed = Conv3D(124,
           kernel_size = (1, 3, 3),
           padding = "same",
           activation = "relu")(x_height_detailed)
x_height_detailed = Conv3D(124,
           kernel_size = (1, 3, 3),
           padding = "same",
           activation = "relu")(x_height_detailed)
x_height_detailed = MaxPooling3D( pool_size=(1, 2, 1))(x_height_detailed)

x_height_detailed = Conv3D(124,
           kernel_size = (1, 3, 3),
           padding = "same",
           activation = "relu")(x_height_detailed)
x_height_detailed = Conv3D(124,
           kernel_size = (1, 3, 3),
           padding = "same",
           activation = "relu")(x_height_detailed)
x_height_detailed = MaxPooling3D( pool_size=(1, 2, 1))(x_height_detailed)


x_height_detailed = Conv3D(124,
           kernel_size = (1, 3, 3),
           padding = "same",
           activation = "relu")(x_height_detailed)
x_height_detailed = Conv3D(124,
           kernel_size = (1, 3, 3),
           padding = "same",
           activation = "relu")(x_height_detailed)
x_height_detailed = MaxPooling3D( pool_size=(1, 1, 2))(x_height_detailed)

x_height_detailed = Conv3D(124,
           kernel_size = (1, 3, 3),
           padding = "same",
           activation = "relu")(x_height_detailed)
x_height_detailed = Conv3D(124,
           kernel_size = (1, 3, 3),
           padding = "same",
           activation = "relu")(x_height_detailed)
x_height_detailed = MaxPooling3D( pool_size=(1, 1, 2))(x_height_detailed)

x_height_detailed = Conv3D(124,
           kernel_size = (1, 3, 3),
           padding = "same",
           activation = "relu")(x_height_detailed)
x_height_detailed = Conv3D(124,
           kernel_size = (1, 3, 3),
           padding = "same",
           activation = "relu")(x_height_detailed)
x_height_detailed = MaxPooling3D( pool_size=(1, 1, 2))(x_height_detailed)

x_height_detailed = Conv3D(124,
           kernel_size = (1, 3, 3),
           padding = "same",
           activation = "relu")(x_height_detailed)
x_height_detailed = Conv3D(124,
           kernel_size = (1, 3, 3),
           padding = "same",
           activation = "relu")(x_height_detailed)
x_height_detailed = MaxPooling3D( pool_size=(1, 1, 2))(x_height_detailed)


x_height_detailed = Flatten()(x_height_detailed)





x_width_detailed = MaxPooling3D( pool_size=(1, 1, 2))(x) # shape: (?, 1, 54, 50, 124)

x_width_detailed = Conv3D(124,
           kernel_size = (1, 3, 3),
           padding = "same",
           activation = "relu")(x_width_detailed)
x_width_detailed = Conv3D(124,
           kernel_size = (1, 3, 3),
           padding = "same",
           activation = "relu")(x_width_detailed)
x_width_detailed = MaxPooling3D( pool_size=(1, 1, 2))(x_width_detailed)

x_width_detailed = Conv3D(124,
           kernel_size = (1, 3, 3),
           padding = "same",
           activation = "relu")(x_width_detailed)
x_width_detailed = Conv3D(124,
           kernel_size = (1, 3, 3),
           padding = "same",
           activation = "relu")(x_width_detailed)
x_width_detailed = MaxPooling3D( pool_size=(1, 1, 2))(x_width_detailed)

x_width_detailed = Conv3D(124,
           kernel_size = (1, 3, 3),
           padding = "same",
           activation = "relu")(x_width_detailed)
x_width_detailed = Conv3D(124,
           kernel_size = (1, 3, 3),
           padding = "same",
           activation = "relu")(x_width_detailed)
x_width_detailed = MaxPooling3D( pool_size=(1, 1, 2))(x_width_detailed)

x_width_detailed = Conv3D(124,
           kernel_size = (1, 3, 3),
           padding = "same",
           activation = "relu")(x_width_detailed)
x_width_detailed = Conv3D(124,
           kernel_size = (1, 3, 3),
           padding = "same",
           activation = "relu")(x_width_detailed)
x_width_detailed = MaxPooling3D( pool_size=(1, 2, 1))(x_width_detailed)

x_width_detailed = Conv3D(124,
           kernel_size = (1, 3, 3),
           padding = "same",
           activation = "relu")(x_width_detailed)
x_width_detailed = Conv3D(124,
           kernel_size = (1, 3, 3),
           padding = "same",
           activation = "relu")(x_width_detailed)
x_width_detailed = MaxPooling3D( pool_size=(1, 2, 1))(x_width_detailed)

x_width_detailed = Conv3D(124,
           kernel_size = (1, 3, 3),
           padding = "same",
           activation = "relu")(x_width_detailed)
x_width_detailed = Conv3D(124,
           kernel_size = (1, 3, 3),
           padding = "same",
           activation = "relu")(x_width_detailed)
x_width_detailed = MaxPooling3D( pool_size=(1, 2, 1))(x_width_detailed)



x_width_detailed = Flatten()(x_width_detailed)


x = keras.layers.concatenate([x_height_detailed, x_width_detailed])

#x = Flatten()(x_height_detailed)
x = Dense(128, activation='relu')(x)
x = Dropout(0.5)(x)
x = Dense(128, activation='relu')(x)
x = Dropout(0.5)(x)
x = Dense(64, activation='relu')(x)
x = Dropout(0.5)(x)
predictions = Dense(3, activation='linear')(x)

model = Model(inputs=inputs, outputs=predictions)


#epoch = 45
#WEIGHTS_FNAME = 'config5_iter%i.hdf'
#model.load_weights(WEIGHTS_FNAME % epoch)
#print("Loaded model.")

#model.optimizer.lr.assign(0.00000001)

# This is a regression on the three rod positions, so use mean squared error
model.compile(optimizer=keras.optimizers.RMSprop(lr=0.0001),
              loss='mean_squared_error',
              metrics=['accuracy'])

model.summary()


'Input shape without batches:'
(54, 100, 3)
____________________________________________________________________________________________________
Layer (type)                     Output Shape          Param #     Connected to                     
====================================================================================================
input_1 (InputLayer)             (None, 1, 54, 100, 3) 0                                            
____________________________________________________________________________________________________
conv3d_1 (Conv3D)                (None, 1, 54, 100, 12 9424        input_1[0][0]                    
____________________________________________________________________________________________________
activation_1 (Activation)        (None, 1, 54, 100, 12 0           conv3d_1[0][0]                   
____________________________________________________________________________________________________
conv3d_2 (Conv3D)                (None, 1, 54, 100, 12 384524      activation_1[0][0]               
____________________________________________________________________________________________________
activation_2 (Activation)        (None, 1, 54, 100, 12 0           conv3d_2[0][0]                   
____________________________________________________________________________________________________
max_pooling3d_1 (MaxPooling3D)   (None, 1, 27, 100, 12 0           activation_2[0][0]               
____________________________________________________________________________________________________
max_pooling3d_8 (MaxPooling3D)   (None, 1, 54, 50, 124 0           activation_2[0][0]               
____________________________________________________________________________________________________
conv3d_3 (Conv3D)                (None, 1, 27, 100, 12 138508      max_pooling3d_1[0][0]            
____________________________________________________________________________________________________
conv3d_15 (Conv3D)               (None, 1, 54, 50, 124 138508      max_pooling3d_8[0][0]            
____________________________________________________________________________________________________
conv3d_4 (Conv3D)                (None, 1, 27, 100, 12 138508      conv3d_3[0][0]                   
____________________________________________________________________________________________________
conv3d_16 (Conv3D)               (None, 1, 54, 50, 124 138508      conv3d_15[0][0]                  
____________________________________________________________________________________________________
max_pooling3d_2 (MaxPooling3D)   (None, 1, 13, 100, 12 0           conv3d_4[0][0]                   
____________________________________________________________________________________________________
max_pooling3d_9 (MaxPooling3D)   (None, 1, 54, 25, 124 0           conv3d_16[0][0]                  
____________________________________________________________________________________________________
conv3d_5 (Conv3D)                (None, 1, 13, 100, 12 138508      max_pooling3d_2[0][0]            
____________________________________________________________________________________________________
conv3d_17 (Conv3D)               (None, 1, 54, 25, 124 138508      max_pooling3d_9[0][0]            
____________________________________________________________________________________________________
conv3d_6 (Conv3D)                (None, 1, 13, 100, 12 138508      conv3d_5[0][0]                   
____________________________________________________________________________________________________
conv3d_18 (Conv3D)               (None, 1, 54, 25, 124 138508      conv3d_17[0][0]                  
____________________________________________________________________________________________________
max_pooling3d_3 (MaxPooling3D)   (None, 1, 6, 100, 124 0           conv3d_6[0][0]                   
____________________________________________________________________________________________________
max_pooling3d_10 (MaxPooling3D)  (None, 1, 54, 12, 124 0           conv3d_18[0][0]                  
____________________________________________________________________________________________________
conv3d_7 (Conv3D)                (None, 1, 6, 100, 124 138508      max_pooling3d_3[0][0]            
____________________________________________________________________________________________________
conv3d_19 (Conv3D)               (None, 1, 54, 12, 124 138508      max_pooling3d_10[0][0]           
____________________________________________________________________________________________________
conv3d_8 (Conv3D)                (None, 1, 6, 100, 124 138508      conv3d_7[0][0]                   
____________________________________________________________________________________________________
conv3d_20 (Conv3D)               (None, 1, 54, 12, 124 138508      conv3d_19[0][0]                  
____________________________________________________________________________________________________
max_pooling3d_4 (MaxPooling3D)   (None, 1, 6, 50, 124) 0           conv3d_8[0][0]                   
____________________________________________________________________________________________________
max_pooling3d_11 (MaxPooling3D)  (None, 1, 54, 6, 124) 0           conv3d_20[0][0]                  
____________________________________________________________________________________________________
conv3d_9 (Conv3D)                (None, 1, 6, 50, 124) 138508      max_pooling3d_4[0][0]            
____________________________________________________________________________________________________
conv3d_21 (Conv3D)               (None, 1, 54, 6, 124) 138508      max_pooling3d_11[0][0]           
____________________________________________________________________________________________________
conv3d_10 (Conv3D)               (None, 1, 6, 50, 124) 138508      conv3d_9[0][0]                   
____________________________________________________________________________________________________
conv3d_22 (Conv3D)               (None, 1, 54, 6, 124) 138508      conv3d_21[0][0]                  
____________________________________________________________________________________________________
max_pooling3d_5 (MaxPooling3D)   (None, 1, 6, 25, 124) 0           conv3d_10[0][0]                  
____________________________________________________________________________________________________
max_pooling3d_12 (MaxPooling3D)  (None, 1, 27, 6, 124) 0           conv3d_22[0][0]                  
____________________________________________________________________________________________________
conv3d_11 (Conv3D)               (None, 1, 6, 25, 124) 138508      max_pooling3d_5[0][0]            
____________________________________________________________________________________________________
conv3d_23 (Conv3D)               (None, 1, 27, 6, 124) 138508      max_pooling3d_12[0][0]           
____________________________________________________________________________________________________
conv3d_12 (Conv3D)               (None, 1, 6, 25, 124) 138508      conv3d_11[0][0]                  
____________________________________________________________________________________________________
conv3d_24 (Conv3D)               (None, 1, 27, 6, 124) 138508      conv3d_23[0][0]                  
____________________________________________________________________________________________________
max_pooling3d_6 (MaxPooling3D)   (None, 1, 6, 12, 124) 0           conv3d_12[0][0]                  
____________________________________________________________________________________________________
max_pooling3d_13 (MaxPooling3D)  (None, 1, 13, 6, 124) 0           conv3d_24[0][0]                  
____________________________________________________________________________________________________
conv3d_13 (Conv3D)               (None, 1, 6, 12, 124) 138508      max_pooling3d_6[0][0]            
____________________________________________________________________________________________________
conv3d_25 (Conv3D)               (None, 1, 13, 6, 124) 138508      max_pooling3d_13[0][0]           
____________________________________________________________________________________________________
conv3d_14 (Conv3D)               (None, 1, 6, 12, 124) 138508      conv3d_13[0][0]                  
____________________________________________________________________________________________________
conv3d_26 (Conv3D)               (None, 1, 13, 6, 124) 138508      conv3d_25[0][0]                  
____________________________________________________________________________________________________
max_pooling3d_7 (MaxPooling3D)   (None, 1, 6, 6, 124)  0           conv3d_14[0][0]                  
____________________________________________________________________________________________________
max_pooling3d_14 (MaxPooling3D)  (None, 1, 6, 6, 124)  0           conv3d_26[0][0]                  
____________________________________________________________________________________________________
flatten_1 (Flatten)              (None, 4464)          0           max_pooling3d_7[0][0]            
____________________________________________________________________________________________________
flatten_2 (Flatten)              (None, 4464)          0           max_pooling3d_14[0][0]           
____________________________________________________________________________________________________
concatenate_1 (Concatenate)      (None, 8928)          0           flatten_1[0][0]                  
                                                                   flatten_2[0][0]                  
____________________________________________________________________________________________________
dense_1 (Dense)                  (None, 128)           1142912     concatenate_1[0][0]              
____________________________________________________________________________________________________
dropout_1 (Dropout)              (None, 128)           0           dense_1[0][0]                    
____________________________________________________________________________________________________
dense_2 (Dense)                  (None, 128)           16512       dropout_1[0][0]                  
____________________________________________________________________________________________________
dropout_2 (Dropout)              (None, 128)           0           dense_2[0][0]                    
____________________________________________________________________________________________________
dense_3 (Dense)                  (None, 64)            8256        dropout_2[0][0]                  
____________________________________________________________________________________________________
dropout_3 (Dropout)              (None, 64)            0           dense_3[0][0]                    
____________________________________________________________________________________________________
dense_4 (Dense)                  (None, 3)             195         dropout_3[0][0]                  
====================================================================================================
Total params: 4,886,015
Trainable params: 4,886,015
Non-trainable params: 0
____________________________________________________________________________________________________

Train our model to identify the rod positions


In [6]:
def mse(y_true, y_pred):
    return K.square(y_pred - y_true)*0.001 # Squared error scaled by 0.001 so the progress bar prints it in scientific notation with more precision than the loss.


model.compile(optimizer=keras.optimizers.RMSprop(lr=0.0001),
              loss='mean_squared_error',
              metrics=[mse])

print("Updated learner.")

# Train the model to predict the current rod positions. These weights later seed the model that predicts the control signal for the robot AI.
WEIGHTS_FNAME = 'pos_cnn_weights_%i.hdf'
MODELS_FNAME = 'pos_cnn_models_%i.h5'

batch_size = 30
batches_training_per_epoch = int(training.get_training_count() / batch_size)
batches_validation_per_epoch = int(training.get_validation_count() / batch_size)
print("%i training batches, %i validation batches" % (batches_training_per_epoch, batches_validation_per_epoch) )

for epoch in range(15):
    try:
        model.fit_generator(TrainBatchGen(batch_size), batches_training_per_epoch, epochs=epoch+1, verbose=1, callbacks=None, class_weight=None, max_q_size=10, workers=1, validation_data=ValidateBatchGen(batch_size), validation_steps = batches_validation_per_epoch, pickle_safe=False, initial_epoch=epoch)
        model.save_weights(WEIGHTS_FNAME % epoch)
        model.save(MODELS_FNAME % epoch)
        print(("Wrote model to " + WEIGHTS_FNAME )  % epoch)
    except KeyboardInterrupt:
        print("\r\nUser stopped the training.")
        break


Updated learner.
1337 training batches, 334 validation batches
Epoch 1/1
1337/1337 [==============================] - 1154s - loss: 0.1691 - mse: 1.6905e-04 - val_loss: 0.0820 - val_mse: 8.1953e-05
Wrote model to pos_cnn_weights_0.hdf
Epoch 2/2
1337/1337 [==============================] - 1120s - loss: 0.0702 - mse: 7.0221e-05 - val_loss: 0.0792 - val_mse: 7.9183e-05
Wrote model to pos_cnn_weights_1.hdf
Epoch 3/3
1337/1337 [==============================] - 1118s - loss: 0.0566 - mse: 5.6631e-05 - val_loss: 0.0548 - val_mse: 5.4826e-05
Wrote model to pos_cnn_weights_2.hdf
Epoch 4/4
1337/1337 [==============================] - 1117s - loss: 0.0480 - mse: 4.8037e-05 - val_loss: 0.0784 - val_mse: 7.8403e-05
Wrote model to pos_cnn_weights_3.hdf
Epoch 5/5
1337/1337 [==============================] - 1117s - loss: 0.0431 - mse: 4.3113e-05 - val_loss: 0.0470 - val_mse: 4.6957e-05
Wrote model to pos_cnn_weights_4.hdf
Epoch 6/6
1337/1337 [==============================] - 1117s - loss: 0.0383 - mse: 3.8313e-05 - val_loss: 0.0423 - val_mse: 4.2286e-05
Wrote model to pos_cnn_weights_5.hdf
Epoch 7/7
1337/1337 [==============================] - 1117s - loss: 0.0358 - mse: 3.5836e-05 - val_loss: 0.0402 - val_mse: 4.0199e-05
Wrote model to pos_cnn_weights_6.hdf
Epoch 8/8
1337/1337 [==============================] - 1118s - loss: 0.0342 - mse: 3.4173e-05 - val_loss: 0.0337 - val_mse: 3.3683e-05
Wrote model to pos_cnn_weights_7.hdf
Epoch 9/9
1337/1337 [==============================] - 1118s - loss: 0.0329 - mse: 3.2933e-05 - val_loss: 0.0348 - val_mse: 3.4838e-05
Wrote model to pos_cnn_weights_8.hdf
Epoch 10/10
1337/1337 [==============================] - 1117s - loss: 0.0315 - mse: 3.1474e-05 - val_loss: 0.0350 - val_mse: 3.4982e-05
Wrote model to pos_cnn_weights_9.hdf
Epoch 11/11
1337/1337 [==============================] - 1117s - loss: 0.0302 - mse: 3.0208e-05 - val_loss: 0.0340 - val_mse: 3.3984e-05
Wrote model to pos_cnn_weights_10.hdf
Epoch 12/12
1337/1337 [==============================] - 1117s - loss: 0.0293 - mse: 2.9298e-05 - val_loss: 0.0327 - val_mse: 3.2674e-05
Wrote model to pos_cnn_weights_11.hdf
Epoch 13/13
1337/1337 [==============================] - 1117s - loss: 0.0289 - mse: 2.8926e-05 - val_loss: 0.0320 - val_mse: 3.1989e-05
Wrote model to pos_cnn_weights_12.hdf
Epoch 14/14
1337/1337 [==============================] - 1117s - loss: 0.0282 - mse: 2.8168e-05 - val_loss: 0.0301 - val_mse: 3.0093e-05
Wrote model to pos_cnn_weights_13.hdf
Epoch 15/15
1337/1337 [==============================] - 1117s - loss: 0.0279 - mse: 2.7851e-05 - val_loss: 0.0338 - val_mse: 3.3783e-05
Wrote model to pos_cnn_weights_14.hdf
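
For reference, the per-epoch checkpointing done by the manual loop above can also be expressed with a Keras callback. A minimal sketch, not what produced the logs above:

from keras.callbacks import ModelCheckpoint

# Save the weights after every epoch, mirroring the manual loop.
checkpoint = ModelCheckpoint('pos_cnn_weights_{epoch:02d}.hdf', save_weights_only=True, verbose=1)

#model.fit_generator(TrainBatchGen(batch_size), batches_training_per_epoch, epochs=15,
#                    callbacks=[checkpoint], validation_data=ValidateBatchGen(batch_size),
#                    validation_steps=batches_validation_per_epoch)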

Test the best trained model's ability to extract the rod positions


In [8]:
# Load the best model result
epoch = 4
WEIGHTS_FNAME = 'pos_cnn_weights_%i.hdf'
model.load_weights(WEIGHTS_FNAME % epoch)
print("Loaded model.")


Loaded model.

In [7]:
# Plot the real versus predicted values for some of the validation data
(frames, outputs_true) = next(ValidateBatchGen(2000))
plot_validate(model, frames, outputs_true, "Position prediction")


2000/2000 [==============================] - 22s    
Predicted.
array([[ 0.30303031,  0.46311477,  0.58918917],
       [ 0.32727271,  0.46311477,  0.58918917],
       [ 0.34545454,  0.46721312,  0.60540539],
       ..., 
       [ 0.58787876,  0.68032789,  0.08108108],
       [ 0.58181816,  0.67622954,  0.08108108],
       [ 0.58181816,  0.67622954,  0.08108108]], dtype=float32)
array([[ 0.25930452,  0.45562249,  0.30997509],
       [ 0.2766003 ,  0.46460703,  0.31523302],
       [ 0.3049469 ,  0.48209572,  0.33907735],
       ..., 
       [ 0.48702449,  0.60471743,  0.11094502],
       [ 0.48738796,  0.61083114,  0.11796197],
       [ 0.48130915,  0.62223399,  0.12320357]], dtype=float32)

Transfer the model as a base to predict the future change in rod positions

The theory is that the trained model has learned filters that reliably extract the rod positions, which is exactly the information needed to decide how the rods should be moved. We therefore reuse this position-tracking model as the starting point for the real AI, which predicts the rod movements that will occur next.
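
The runs below keep every layer trainable and simply continue training from the position weights. One alternative worth noting is to freeze the convolutional feature extractor and fine-tune only the dense head; a minimal sketch, using the layer classes already imported above:

# Hypothetical transfer variant (not used in the recorded runs): keep the learned
# position-tracking filters fixed and retrain only the dense layers on the dpos targets.
for layer in model.layers:
    if isinstance(layer, (Conv3D, MaxPooling3D)):
        layer.trainable = False

model.compile(optimizer=keras.optimizers.RMSprop(lr=0.0001),
              loss='mean_squared_error',
              metrics=[mse])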


In [8]:
# Load the best position prediction model as the starting point
epoch = 10
WEIGHTS_FNAME = 'pos_cnn_weights_%i.hdf'
model.load_weights(WEIGHTS_FNAME % epoch)
print("Loaded model.")


Loaded model.

In [8]:
def mse(y_true, y_pred):
    return K.square(y_pred - y_true)*0.001 # Hackjob so Keras iterations show exponential value of MSE to get precision.


model.compile(optimizer=keras.optimizers.RMSprop(lr=0.0001),
              loss='mean_squared_error',
              metrics=[mse])

print("Updated learner.")

# Train the model to predict the future change in rod positions. This is the control signal to the robot AI
WEIGHTS_FNAME = 'dpos_cnn_weights_%i.hdf'
MODELS_FNAME = 'dpos_cnn_models_%i.h5'


for epoch in range(10000):
    try:
        model.fit_generator(TrainBatchGenDpos(20), 1552, epochs=epoch+1, verbose=1, callbacks=None, class_weight=None, max_q_size=10, workers=1, validation_data=ValidateBatchGenDpos(20), validation_steps = 500, pickle_safe=False, initial_epoch=epoch)
        model.save_weights(WEIGHTS_FNAME % epoch)
        model.save(MODELS_FNAME % epoch)
        print(("Wrote model to " + WEIGHTS_FNAME )  % epoch)
    except KeyboardInterrupt:
        print("\r\nUser stopped the training.")
        break


Updated learner.
Epoch 1/1
1552/1552 [==============================] - 916s - loss: 0.0183 - mse: 1.8290e-05 - val_loss: 0.0067 - val_mse: 6.7201e-06
Wrote model to dpos_cnn_weights_0.hdf
Epoch 2/2
1552/1552 [==============================] - 909s - loss: 0.0091 - mse: 9.1286e-06 - val_loss: 0.0055 - val_mse: 5.5473e-06
Wrote model to dpos_cnn_weights_1.hdf
Epoch 3/3
1552/1552 [==============================] - 909s - loss: 0.0077 - mse: 7.6862e-06 - val_loss: 0.0055 - val_mse: 5.4997e-06
Wrote model to dpos_cnn_weights_2.hdf
Epoch 4/4
1552/1552 [==============================] - 909s - loss: 0.0073 - mse: 7.2884e-06 - val_loss: 0.0055 - val_mse: 5.4799e-06
Wrote model to dpos_cnn_weights_3.hdf
Epoch 5/5
1552/1552 [==============================] - 909s - loss: 0.0071 - mse: 7.1082e-06 - val_loss: 0.0055 - val_mse: 5.4989e-06
Wrote model to dpos_cnn_weights_4.hdf
Epoch 6/6
1552/1552 [==============================] - 909s - loss: 0.0070 - mse: 7.0227e-06 - val_loss: 0.0055 - val_mse: 5.4860e-06
Wrote model to dpos_cnn_weights_5.hdf
Epoch 7/7
1552/1552 [==============================] - 909s - loss: 0.0069 - mse: 6.9477e-06 - val_loss: 0.0055 - val_mse: 5.4895e-06
Wrote model to dpos_cnn_weights_6.hdf
Epoch 8/8
1552/1552 [==============================] - 909s - loss: 0.0069 - mse: 6.9093e-06 - val_loss: 0.0055 - val_mse: 5.4921e-06
Wrote model to dpos_cnn_weights_7.hdf
Epoch 9/9
1552/1552 [==============================] - 909s - loss: 0.0069 - mse: 6.8970e-06 - val_loss: 0.0055 - val_mse: 5.4777e-06
Wrote model to dpos_cnn_weights_8.hdf
Epoch 10/10
1552/1552 [==============================] - 910s - loss: 0.0069 - mse: 6.8791e-06 - val_loss: 0.0055 - val_mse: 5.4780e-06
Wrote model to dpos_cnn_weights_9.hdf
Epoch 11/11
1552/1552 [==============================] - 909s - loss: 0.0069 - mse: 6.8566e-06 - val_loss: 0.0055 - val_mse: 5.5026e-06
Wrote model to dpos_cnn_weights_10.hdf
Epoch 12/12
1552/1552 [==============================] - 909s - loss: 0.0069 - mse: 6.8505e-06 - val_loss: 0.0055 - val_mse: 5.4781e-06
Wrote model to dpos_cnn_weights_11.hdf
Epoch 13/13
1552/1552 [==============================] - 910s - loss: 0.0068 - mse: 6.8439e-06 - val_loss: 0.0055 - val_mse: 5.4694e-06
Wrote model to dpos_cnn_weights_12.hdf
Epoch 14/14
1552/1552 [==============================] - 910s - loss: 0.0068 - mse: 6.8285e-06 - val_loss: 0.0055 - val_mse: 5.4680e-06
Wrote model to dpos_cnn_weights_13.hdf
Epoch 15/15
1552/1552 [==============================] - 910s - loss: 0.0068 - mse: 6.8266e-06 - val_loss: 0.0055 - val_mse: 5.4837e-06
Wrote model to dpos_cnn_weights_14.hdf
Epoch 16/16
1552/1552 [==============================] - 910s - loss: 0.0068 - mse: 6.8112e-06 - val_loss: 0.0055 - val_mse: 5.4974e-06
Wrote model to dpos_cnn_weights_15.hdf
Epoch 17/17
1552/1552 [==============================] - 909s - loss: 0.0068 - mse: 6.8121e-06 - val_loss: 0.0055 - val_mse: 5.4918e-06
Wrote model to dpos_cnn_weights_16.hdf
Epoch 18/18
 366/1552 [======>.......................] - ETA: 633s - loss: 0.0046 - mse: 4.5645e-06
User stopped the training.

In [11]:
# Load the best position prediction model as the starting point
epoch = 17
WEIGHTS_FNAME = 'dpos_cnn_weights_%i.hdf'
model.load_weights(WEIGHTS_FNAME % epoch)
print("Loaded model.")


# Plot the real versus predicted values for some of the validation data
(frames, outputs_true) = next(ValidateBatchGenDpos(2000))
plot_validate(model, frames, outputs_true, "Difference in position")


Loaded model.
2000/2000 [==============================] - 17s    
Predicted.
array([[ 0.04848485,  0.        ,  0.05405406],
       [ 0.02424242, -0.00409836,  0.0972973 ],
       [ 0.00606061, -0.01639344,  0.09189189],
       ..., 
       [-0.02424242, -0.00819672,  0.        ],
       [-0.04242424, -0.00819672,  0.        ],
       [-0.06666667, -0.01229508,  0.        ]], dtype=float32)
array([[ 0.01415953,  0.02283407,  0.00777719],
       [ 0.01175444,  0.01936499,  0.00638732],
       [ 0.01334964,  0.0219138 ,  0.00445735],
       ..., 
       [-0.0007335 ,  0.00177975,  0.01111973],
       [-0.00213342, -0.00057074,  0.00930038],
       [-0.00206168, -0.00097752,  0.0081915 ]], dtype=float32)

In [12]:
def mse(y_true, y_pred):
    return K.square(y_pred - y_true)*0.001 # Hackjob so Keras iterations show exponential value of MSE to get precision.


model.compile(optimizer=keras.optimizers.RMSprop(lr=0.00001),
              loss='mean_squared_error',
              metrics=[mse])

print("Updated learner.")

# Train the model to predict the future position. This is the control signal to the robot AI
WEIGHTS_FNAME = 'dpos_cnn_weights_%i.hdf'
MODELS_FNAME = 'dpos_cnn_models_%i.h5'

start_epoch = epoch+1

for epoch in range(10000):
    epoch += start_epoch
    try:
        model.fit_generator(TrainBatchGenDpos(20), 1552, epochs=epoch+1, verbose=1, callbacks=None, class_weight=None, max_q_size=10, workers=1, validation_data=ValidateBatchGenDpos(20), validation_steps = 500, pickle_safe=False, initial_epoch=epoch)
        model.save_weights(WEIGHTS_FNAME % epoch)
        model.save(MODELS_FNAME % epoch)
        print(("Wrote model to " + WEIGHTS_FNAME )  % epoch)
    except KeyboardInterrupt:
        print("\r\nUser stopped the training.")
        break


Updated learner.
Epoch 19/19
1552/1552 [==============================] - 912s - loss: 0.0066 - mse: 6.6090e-06 - val_loss: 0.0051 - val_mse: 5.1126e-06
Wrote model to dpos_cnn_weights_18.hdf
Epoch 20/20
1552/1552 [==============================] - 914s - loss: 0.0065 - mse: 6.5079e-06 - val_loss: 0.0051 - val_mse: 5.0992e-06
Wrote model to dpos_cnn_weights_19.hdf
Epoch 21/21
1552/1552 [==============================] - 912s - loss: 0.0065 - mse: 6.4604e-06 - val_loss: 0.0052 - val_mse: 5.1792e-06
Wrote model to dpos_cnn_weights_20.hdf
Epoch 22/22
1552/1552 [==============================] - 912s - loss: 0.0065 - mse: 6.4639e-06 - val_loss: 0.0051 - val_mse: 5.0805e-06
Wrote model to dpos_cnn_weights_21.hdf
Epoch 23/23
1552/1552 [==============================] - 912s - loss: 0.0065 - mse: 6.4561e-06 - val_loss: 0.0051 - val_mse: 5.0749e-06
Wrote model to dpos_cnn_weights_22.hdf
Epoch 24/24
1552/1552 [==============================] - 912s - loss: 0.0064 - mse: 6.4163e-06 - val_loss: 0.0050 - val_mse: 5.0363e-06
Wrote model to dpos_cnn_weights_23.hdf
Epoch 25/25
1552/1552 [==============================] - 912s - loss: 0.0064 - mse: 6.4087e-06 - val_loss: 0.0051 - val_mse: 5.0812e-06
Wrote model to dpos_cnn_weights_24.hdf
Epoch 26/26
1552/1552 [==============================] - 914s - loss: 0.0064 - mse: 6.3758e-06 - val_loss: 0.0051 - val_mse: 5.0737e-06
Wrote model to dpos_cnn_weights_25.hdf
Epoch 27/27
1552/1552 [==============================] - 913s - loss: 0.0064 - mse: 6.3734e-06 - val_loss: 0.0051 - val_mse: 5.0546e-06
Wrote model to dpos_cnn_weights_26.hdf
Epoch 28/28
1552/1552 [==============================] - 913s - loss: 0.0064 - mse: 6.3791e-06 - val_loss: 0.0051 - val_mse: 5.0676e-06
Wrote model to dpos_cnn_weights_27.hdf
Epoch 29/29
1552/1552 [==============================] - 913s - loss: 0.0064 - mse: 6.3519e-06 - val_loss: 0.0051 - val_mse: 5.0541e-06
Wrote model to dpos_cnn_weights_28.hdf
Epoch 30/30
1552/1552 [==============================] - 913s - loss: 0.0063 - mse: 6.3364e-06 - val_loss: 0.0050 - val_mse: 5.0397e-06
Wrote model to dpos_cnn_weights_29.hdf
Epoch 31/31
1552/1552 [==============================] - 915s - loss: 0.0063 - mse: 6.2956e-06 - val_loss: 0.0050 - val_mse: 5.0251e-06
Wrote model to dpos_cnn_weights_30.hdf
Epoch 32/32
1552/1552 [==============================] - 911s - loss: 0.0063 - mse: 6.2919e-06 - val_loss: 0.0050 - val_mse: 4.9815e-06
Wrote model to dpos_cnn_weights_31.hdf
Epoch 33/33
1552/1552 [==============================] - 911s - loss: 0.0063 - mse: 6.2771e-06 - val_loss: 0.0050 - val_mse: 5.0359e-06
Wrote model to dpos_cnn_weights_32.hdf
Epoch 34/34
1552/1552 [==============================] - 1875s - loss: 0.0063 - mse: 6.2818e-06 - val_loss: 0.0050 - val_mse: 4.9796e-06
Wrote model to dpos_cnn_weights_33.hdf
Epoch 35/35
1552/1552 [==============================] - 2373s - loss: 0.0063 - mse: 6.2768e-06 - val_loss: 0.0050 - val_mse: 4.9943e-06
Wrote model to dpos_cnn_weights_34.hdf
Epoch 36/36
1552/1552 [==============================] - 2368s - loss: 0.0062 - mse: 6.2480e-06 - val_loss: 0.0050 - val_mse: 5.0166e-06
Wrote model to dpos_cnn_weights_35.hdf
Epoch 37/37
1462/1552 [===========================>..] - ETA: 130s - loss: 0.0064 - mse: 6.3523e-06
User stopped the training.

In [8]:
# Load the best position prediction model as the starting point
epoch = 35
WEIGHTS_FNAME = 'dpos_cnn_weights_%i.hdf'
model.load_weights(WEIGHTS_FNAME % epoch)
print("Loaded model.")


# Plot the real versus predicted values for some of the validation data
(frames, outputs_true) = next(ValidateBatchGenDpos(2000))
plot_validate(model, frames, outputs_true, "Difference in position")


Loaded model.
2000/2000 [==============================] - 17s    
Predicted.
array([[ 0.04848485,  0.        ,  0.05405406],
       [ 0.02424242, -0.00409836,  0.0972973 ],
       [ 0.00606061, -0.01639344,  0.09189189],
       ..., 
       [-0.02424242, -0.00819672,  0.        ],
       [-0.04242424, -0.00819672,  0.        ],
       [-0.06666667, -0.01229508,  0.        ]], dtype=float32)
array([[ 0.00339498,  0.00111478, -0.01982322],
       [ 0.00233725, -0.00139576, -0.02061214],
       [ 0.0015915 , -0.00304804, -0.02379174],
       ..., 
       [ 0.00101039, -0.00167879, -0.00241241],
       [-0.00071766, -0.00406537, -0.00128087],
       [-0.00078701, -0.00381533, -0.00610609]], dtype=float32)

In [9]:
def mse(y_true, y_pred):
    return K.square(y_pred - y_true)*0.001 # Hackjob so Keras iterations show exponential value of MSE to get precision.


model.compile(optimizer=keras.optimizers.RMSprop(lr=0.000001),
              loss='mean_squared_error',
              metrics=[mse])

print("Updated learner.")

# Train the model to predict the future position. This is the control signal to the robot AI
WEIGHTS_FNAME = 'dpos_cnn_weights_%i.hdf'
MODELS_FNAME = 'dpos_cnn_models_%i.h5'

start_epoch = epoch+1

for epoch in range(10000):
    epoch += start_epoch
    try:
        model.fit_generator(TrainBatchGenDpos(20), 1552, epochs=epoch+1, verbose=1, callbacks=None, class_weight=None, max_q_size=10, workers=1, validation_data=ValidateBatchGenDpos(20), validation_steps = 500, pickle_safe=False, initial_epoch=epoch)
        model.save_weights(WEIGHTS_FNAME % epoch)
        model.save(MODELS_FNAME % epoch)
        print(("Wrote model to " + WEIGHTS_FNAME )  % epoch)
    except KeyboardInterrupt:
        print("\r\nUser stopped the training.")
        break


Updated learner.
Epoch 37/37
1552/1552 [==============================] - 911s - loss: 0.0063 - mse: 6.2736e-06 - val_loss: 0.0049 - val_mse: 4.9413e-06
Wrote model to dpos_cnn_weights_36.hdf
Epoch 38/38
1552/1552 [==============================] - 909s - loss: 0.0063 - mse: 6.2616e-06 - val_loss: 0.0049 - val_mse: 4.9240e-06
Wrote model to dpos_cnn_weights_37.hdf
Epoch 39/39
1552/1552 [==============================] - 908s - loss: 0.0063 - mse: 6.2617e-06 - val_loss: 0.0049 - val_mse: 4.9298e-06
Wrote model to dpos_cnn_weights_38.hdf
Epoch 40/40
1552/1552 [==============================] - 908s - loss: 0.0062 - mse: 6.2362e-06 - val_loss: 0.0049 - val_mse: 4.9129e-06
Wrote model to dpos_cnn_weights_39.hdf
Epoch 41/41
1552/1552 [==============================] - 909s - loss: 0.0062 - mse: 6.2357e-06 - val_loss: 0.0049 - val_mse: 4.9029e-06
Wrote model to dpos_cnn_weights_40.hdf
Epoch 42/42
1552/1552 [==============================] - 909s - loss: 0.0062 - mse: 6.2240e-06 - val_loss: 0.0049 - val_mse: 4.9147e-06
Wrote model to dpos_cnn_weights_41.hdf
Epoch 43/43
1552/1552 [==============================] - 909s - loss: 0.0062 - mse: 6.2309e-06 - val_loss: 0.0049 - val_mse: 4.9104e-06
Wrote model to dpos_cnn_weights_42.hdf
Epoch 44/44
1552/1552 [==============================] - 909s - loss: 0.0062 - mse: 6.2426e-06 - val_loss: 0.0049 - val_mse: 4.9190e-06
Wrote model to dpos_cnn_weights_43.hdf
Epoch 45/45
1552/1552 [==============================] - 909s - loss: 0.0062 - mse: 6.2481e-06 - val_loss: 0.0049 - val_mse: 4.9120e-06
Wrote model to dpos_cnn_weights_44.hdf
Epoch 46/46
1552/1552 [==============================] - 909s - loss: 0.0062 - mse: 6.2449e-06 - val_loss: 0.0049 - val_mse: 4.9070e-06
Wrote model to dpos_cnn_weights_45.hdf
Epoch 47/47
1552/1552 [==============================] - 909s - loss: 0.0062 - mse: 6.2296e-06 - val_loss: 0.0049 - val_mse: 4.9141e-06
Wrote model to dpos_cnn_weights_46.hdf
Epoch 48/48
1552/1552 [==============================] - 909s - loss: 0.0062 - mse: 6.2309e-06 - val_loss: 0.0049 - val_mse: 4.8981e-06
Wrote model to dpos_cnn_weights_47.hdf
Epoch 49/49
1552/1552 [==============================] - 909s - loss: 0.0062 - mse: 6.2260e-06 - val_loss: 0.0049 - val_mse: 4.9097e-06
Wrote model to dpos_cnn_weights_48.hdf
Epoch 50/50
1552/1552 [==============================] - 909s - loss: 0.0062 - mse: 6.2415e-06 - val_loss: 0.0049 - val_mse: 4.9136e-06
Wrote model to dpos_cnn_weights_49.hdf
Epoch 51/51
1552/1552 [==============================] - 909s - loss: 0.0062 - mse: 6.2242e-06 - val_loss: 0.0049 - val_mse: 4.8967e-06
Wrote model to dpos_cnn_weights_50.hdf
Epoch 52/52
1552/1552 [==============================] - 909s - loss: 0.0062 - mse: 6.1989e-06 - val_loss: 0.0049 - val_mse: 4.8974e-06
Wrote model to dpos_cnn_weights_51.hdf
Epoch 53/53
1552/1552 [==============================] - 909s - loss: 0.0062 - mse: 6.2130e-06 - val_loss: 0.0049 - val_mse: 4.9172e-06
Wrote model to dpos_cnn_weights_52.hdf
Epoch 54/54
1552/1552 [==============================] - 909s - loss: 0.0062 - mse: 6.2223e-06 - val_loss: 0.0049 - val_mse: 4.9044e-06
Wrote model to dpos_cnn_weights_53.hdf
Epoch 55/55
1552/1552 [==============================] - 909s - loss: 0.0062 - mse: 6.2105e-06 - val_loss: 0.0049 - val_mse: 4.8964e-06
Wrote model to dpos_cnn_weights_54.hdf
Epoch 56/56
1552/1552 [==============================] - 909s - loss: 0.0062 - mse: 6.2037e-06 - val_loss: 0.0049 - val_mse: 4.8935e-06
Wrote model to dpos_cnn_weights_55.hdf
Epoch 57/57
1552/1552 [==============================] - 912s - loss: 0.0062 - mse: 6.2012e-06 - val_loss: 0.0049 - val_mse: 4.9046e-06
Wrote model to dpos_cnn_weights_56.hdf
Epoch 58/58
1552/1552 [==============================] - 913s - loss: 0.0062 - mse: 6.2003e-06 - val_loss: 0.0049 - val_mse: 4.8954e-06
Wrote model to dpos_cnn_weights_57.hdf
Epoch 59/59
1552/1552 [==============================] - 910s - loss: 0.0062 - mse: 6.1967e-06 - val_loss: 0.0049 - val_mse: 4.8922e-06
Wrote model to dpos_cnn_weights_58.hdf
Epoch 60/60
1552/1552 [==============================] - 910s - loss: 0.0062 - mse: 6.1948e-06 - val_loss: 0.0049 - val_mse: 4.8913e-06
Wrote model to dpos_cnn_weights_59.hdf
Epoch 61/61
1552/1552 [==============================] - 911s - loss: 0.0062 - mse: 6.1701e-06 - val_loss: 0.0049 - val_mse: 4.8846e-06
Wrote model to dpos_cnn_weights_60.hdf
Epoch 62/62
1552/1552 [==============================] - 910s - loss: 0.0062 - mse: 6.2224e-06 - val_loss: 0.0049 - val_mse: 4.8867e-06
Wrote model to dpos_cnn_weights_61.hdf
Epoch 63/63
 813/1552 [==============>...............] - ETA: 398s - loss: 0.0058 - mse: 5.7987e-06
User stopped the training.

In [10]:
# Load the best position prediction model as the starting point
epoch = 61
WEIGHTS_FNAME = 'dpos_cnn_weights_%i.hdf'
model.load_weights(WEIGHTS_FNAME % epoch)
print("Loaded model.")


# Plot the real versus predicted values for some of the validation data
(frames, outputs_true) = next(ValidateBatchGenDpos(2000))
plot_validate(model, frames, outputs_true, "Difference in position")


Loaded model.
2000/2000 [==============================] - 17s    
Predicted.
array([[ 0.04848485,  0.        ,  0.05405406],
       [ 0.02424242, -0.00409836,  0.0972973 ],
       [ 0.00606061, -0.01639344,  0.09189189],
       ..., 
       [-0.02424242, -0.00819672,  0.        ],
       [-0.04242424, -0.00819672,  0.        ],
       [-0.06666667, -0.01229508,  0.        ]], dtype=float32)
array([[ 0.0093574 ,  0.01873443,  0.00540401],
       [ 0.00718395,  0.01508207,  0.00241166],
       [ 0.0044055 ,  0.01147011, -0.00490699],
       ..., 
       [ 0.00487349,  0.0057203 ,  0.00842261],
       [ 0.0024164 ,  0.00172509,  0.00668769],
       [ 0.00082103, -0.00023784,  0.00527946]], dtype=float32)

In [11]:
def mse(y_true, y_pred):
    return K.square(y_pred - y_true)*0.001 # Hackjob so Keras iterations show exponential value of MSE to get precision.


model.compile(optimizer=keras.optimizers.RMSprop(lr=0.00001),
              loss='mean_squared_error',
              metrics=[mse])

print("Updated learner.")

# Train the model to predict the future position. This is the control signal to the robot AI
WEIGHTS_FNAME = 'dpos_cnn_weights_%i.hdf'
MODELS_FNAME = 'dpos_cnn_models_%i.h5'

start_epoch = epoch+1

for epoch in range(10000):
    epoch += start_epoch
    try:
        model.fit_generator(TrainBatchGenDpos(20), 1552, epochs=epoch+1, verbose=1, callbacks=None, class_weight=None, max_q_size=10, workers=1, validation_data=ValidateBatchGenDpos(20), validation_steps = 500, pickle_safe=False, initial_epoch=epoch)
        model.save_weights(WEIGHTS_FNAME % epoch)
        model.save(MODELS_FNAME % epoch)
        print(("Wrote model to " + WEIGHTS_FNAME )  % epoch)
    except KeyboardInterrupt:
        print("\r\nUser stopped the training.")
        break


Updated learner.
Epoch 63/63
1552/1552 [==============================] - 915s - loss: 0.0062 - mse: 6.2348e-06 - val_loss: 0.0050 - val_mse: 4.9908e-06
Wrote model to dpos_cnn_weights_62.hdf
Epoch 64/64
1552/1552 [==============================] - 910s - loss: 0.0062 - mse: 6.2184e-06 - val_loss: 0.0050 - val_mse: 4.9913e-06
Wrote model to dpos_cnn_weights_63.hdf
Epoch 65/65
1552/1552 [==============================] - 911s - loss: 0.0062 - mse: 6.1976e-06 - val_loss: 0.0050 - val_mse: 4.9966e-06
Wrote model to dpos_cnn_weights_64.hdf
Epoch 66/66
1552/1552 [==============================] - 911s - loss: 0.0062 - mse: 6.1699e-06 - val_loss: 0.0049 - val_mse: 4.9112e-06
Wrote model to dpos_cnn_weights_65.hdf
Epoch 67/67
1552/1552 [==============================] - 916s - loss: 0.0062 - mse: 6.1756e-06 - val_loss: 0.0050 - val_mse: 4.9778e-06
Wrote model to dpos_cnn_weights_66.hdf
Epoch 68/68
1552/1552 [==============================] - 911s - loss: 0.0062 - mse: 6.1708e-06 - val_loss: 0.0049 - val_mse: 4.9349e-06
Wrote model to dpos_cnn_weights_67.hdf
Epoch 69/69
1552/1552 [==============================] - 911s - loss: 0.0062 - mse: 6.1643e-06 - val_loss: 0.0049 - val_mse: 4.9153e-06
Wrote model to dpos_cnn_weights_68.hdf
Epoch 70/70
1552/1552 [==============================] - 911s - loss: 0.0061 - mse: 6.1494e-06 - val_loss: 0.0050 - val_mse: 4.9744e-06
Wrote model to dpos_cnn_weights_69.hdf
Epoch 71/71
1552/1552 [==============================] - 911s - loss: 0.0061 - mse: 6.1391e-06 - val_loss: 0.0049 - val_mse: 4.9191e-06
Wrote model to dpos_cnn_weights_70.hdf
Epoch 72/72
1552/1552 [==============================] - 911s - loss: 0.0061 - mse: 6.1443e-06 - val_loss: 0.0049 - val_mse: 4.9240e-06
Wrote model to dpos_cnn_weights_71.hdf
Epoch 73/73
1552/1552 [==============================] - 912s - loss: 0.0061 - mse: 6.1051e-06 - val_loss: 0.0049 - val_mse: 4.8926e-06
Wrote model to dpos_cnn_weights_72.hdf
Epoch 74/74
1552/1552 [==============================] - 912s - loss: 0.0061 - mse: 6.1190e-06 - val_loss: 0.0049 - val_mse: 4.8535e-06
Wrote model to dpos_cnn_weights_73.hdf
Epoch 75/75
1552/1552 [==============================] - 911s - loss: 0.0061 - mse: 6.0921e-06 - val_loss: 0.0049 - val_mse: 4.8750e-06
Wrote model to dpos_cnn_weights_74.hdf
Epoch 76/76
1552/1552 [==============================] - 911s - loss: 0.0061 - mse: 6.1001e-06 - val_loss: 0.0049 - val_mse: 4.9255e-06
Wrote model to dpos_cnn_weights_75.hdf
Epoch 77/77
1552/1552 [==============================] - 911s - loss: 0.0061 - mse: 6.0812e-06 - val_loss: 0.0049 - val_mse: 4.9043e-06
Wrote model to dpos_cnn_weights_76.hdf
Epoch 78/78
1552/1552 [==============================] - 911s - loss: 0.0061 - mse: 6.0648e-06 - val_loss: 0.0049 - val_mse: 4.8826e-06
Wrote model to dpos_cnn_weights_77.hdf
Epoch 79/79
1552/1552 [==============================] - 911s - loss: 0.0061 - mse: 6.0769e-06 - val_loss: 0.0049 - val_mse: 4.8718e-06
Wrote model to dpos_cnn_weights_78.hdf
Epoch 80/80
1552/1552 [==============================] - 911s - loss: 0.0061 - mse: 6.0525e-06 - val_loss: 0.0048 - val_mse: 4.8353e-06
Wrote model to dpos_cnn_weights_79.hdf
Epoch 81/81
1552/1552 [==============================] - 912s - loss: 0.0061 - mse: 6.0514e-06 - val_loss: 0.0049 - val_mse: 4.8602e-06
Wrote model to dpos_cnn_weights_80.hdf
Epoch 82/82
1552/1552 [==============================] - 911s - loss: 0.0060 - mse: 6.0246e-06 - val_loss: 0.0049 - val_mse: 4.8729e-06
Wrote model to dpos_cnn_weights_81.hdf
Epoch 83/83
1552/1552 [==============================] - 912s - loss: 0.0060 - mse: 6.0485e-06 - val_loss: 0.0049 - val_mse: 4.8816e-06
Wrote model to dpos_cnn_weights_82.hdf
Epoch 84/84
1552/1552 [==============================] - 911s - loss: 0.0061 - mse: 6.0606e-06 - val_loss: 0.0049 - val_mse: 4.8664e-06
Wrote model to dpos_cnn_weights_83.hdf
Epoch 85/85
1552/1552 [==============================] - 911s - loss: 0.0060 - mse: 6.0368e-06 - val_loss: 0.0048 - val_mse: 4.8340e-06
Wrote model to dpos_cnn_weights_84.hdf
Epoch 86/86
1552/1552 [==============================] - 912s - loss: 0.0060 - mse: 6.0173e-06 - val_loss: 0.0048 - val_mse: 4.7661e-06
Wrote model to dpos_cnn_weights_85.hdf
Epoch 87/87
1552/1552 [==============================] - 911s - loss: 0.0060 - mse: 6.0293e-06 - val_loss: 0.0049 - val_mse: 4.9176e-06
Wrote model to dpos_cnn_weights_86.hdf
Epoch 88/88
1552/1552 [==============================] - 912s - loss: 0.0060 - mse: 6.0113e-06 - val_loss: 0.0048 - val_mse: 4.8257e-06
Wrote model to dpos_cnn_weights_87.hdf
Epoch 89/89
1552/1552 [==============================] - 911s - loss: 0.0060 - mse: 6.0112e-06 - val_loss: 0.0048 - val_mse: 4.8348e-06
Wrote model to dpos_cnn_weights_88.hdf
Epoch 90/90
1552/1552 [==============================] - 912s - loss: 0.0060 - mse: 6.0159e-06 - val_loss: 0.0048 - val_mse: 4.7615e-06
Wrote model to dpos_cnn_weights_89.hdf
Epoch 91/91
1552/1552 [==============================] - 911s - loss: 0.0060 - mse: 5.9908e-06 - val_loss: 0.0048 - val_mse: 4.7514e-06
Wrote model to dpos_cnn_weights_90.hdf
Epoch 92/92
1552/1552 [==============================] - 911s - loss: 0.0060 - mse: 5.9549e-06 - val_loss: 0.0048 - val_mse: 4.7677e-06
Wrote model to dpos_cnn_weights_91.hdf
Epoch 93/93
1552/1552 [==============================] - 911s - loss: 0.0060 - mse: 5.9746e-06 - val_loss: 0.0048 - val_mse: 4.8332e-06
Wrote model to dpos_cnn_weights_92.hdf
Epoch 94/94
1552/1552 [==============================] - 912s - loss: 0.0060 - mse: 5.9772e-06 - val_loss: 0.0048 - val_mse: 4.8145e-06
Wrote model to dpos_cnn_weights_93.hdf
Epoch 95/95
1552/1552 [==============================] - 912s - loss: 0.0060 - mse: 5.9581e-06 - val_loss: 0.0048 - val_mse: 4.7903e-06
Wrote model to dpos_cnn_weights_94.hdf
Epoch 96/96
1552/1552 [==============================] - 911s - loss: 0.0060 - mse: 5.9533e-06 - val_loss: 0.0048 - val_mse: 4.7878e-06
Wrote model to dpos_cnn_weights_95.hdf
Epoch 97/97
1552/1552 [==============================] - 912s - loss: 0.0060 - mse: 5.9645e-06 - val_loss: 0.0048 - val_mse: 4.8030e-06
Wrote model to dpos_cnn_weights_96.hdf
Epoch 98/98
1552/1552 [==============================] - 912s - loss: 0.0059 - mse: 5.9059e-06 - val_loss: 0.0048 - val_mse: 4.7810e-06
Wrote model to dpos_cnn_weights_97.hdf
Epoch 99/99
1552/1552 [==============================] - 911s - loss: 0.0059 - mse: 5.9347e-06 - val_loss: 0.0047 - val_mse: 4.7250e-06
Wrote model to dpos_cnn_weights_98.hdf
Epoch 100/100
1552/1552 [==============================] - 912s - loss: 0.0060 - mse: 5.9522e-06 - val_loss: 0.0047 - val_mse: 4.7136e-06
Wrote model to dpos_cnn_weights_99.hdf
Epoch 101/101
1552/1552 [==============================] - 911s - loss: 0.0059 - mse: 5.9231e-06 - val_loss: 0.0048 - val_mse: 4.7752e-06
Wrote model to dpos_cnn_weights_100.hdf
Epoch 102/102
1552/1552 [==============================] - 911s - loss: 0.0059 - mse: 5.9486e-06 - val_loss: 0.0048 - val_mse: 4.7721e-06
Wrote model to dpos_cnn_weights_101.hdf
Epoch 103/103
1552/1552 [==============================] - 912s - loss: 0.0059 - mse: 5.9177e-06 - val_loss: 0.0048 - val_mse: 4.7782e-06
Wrote model to dpos_cnn_weights_102.hdf
Epoch 104/104
1552/1552 [==============================] - 911s - loss: 0.0059 - mse: 5.8985e-06 - val_loss: 0.0047 - val_mse: 4.7318e-06
Wrote model to dpos_cnn_weights_103.hdf
Epoch 105/105
1552/1552 [==============================] - 911s - loss: 0.0059 - mse: 5.8843e-06 - val_loss: 0.0048 - val_mse: 4.7606e-06
Wrote model to dpos_cnn_weights_104.hdf
Epoch 106/106
1552/1552 [==============================] - 912s - loss: 0.0059 - mse: 5.9014e-06 - val_loss: 0.0047 - val_mse: 4.6958e-06
Wrote model to dpos_cnn_weights_105.hdf
Epoch 107/107
1552/1552 [==============================] - 912s - loss: 0.0059 - mse: 5.8883e-06 - val_loss: 0.0047 - val_mse: 4.6931e-06
Wrote model to dpos_cnn_weights_106.hdf
Epoch 108/108
1552/1552 [==============================] - 912s - loss: 0.0059 - mse: 5.8922e-06 - val_loss: 0.0047 - val_mse: 4.6979e-06
Wrote model to dpos_cnn_weights_107.hdf
Epoch 109/109
1552/1552 [==============================] - 912s - loss: 0.0059 - mse: 5.8764e-06 - val_loss: 0.0047 - val_mse: 4.7136e-06
Wrote model to dpos_cnn_weights_108.hdf
Epoch 110/110
1552/1552 [==============================] - 912s - loss: 0.0059 - mse: 5.8686e-06 - val_loss: 0.0047 - val_mse: 4.7290e-06
Wrote model to dpos_cnn_weights_109.hdf
Epoch 111/111
1552/1552 [==============================] - 911s - loss: 0.0059 - mse: 5.8780e-06 - val_loss: 0.0048 - val_mse: 4.7594e-06
Wrote model to dpos_cnn_weights_110.hdf
Epoch 112/112
1552/1552 [==============================] - 911s - loss: 0.0059 - mse: 5.8607e-06 - val_loss: 0.0047 - val_mse: 4.7412e-06
Wrote model to dpos_cnn_weights_111.hdf
Epoch 113/113
1552/1552 [==============================] - 912s - loss: 0.0059 - mse: 5.8571e-06 - val_loss: 0.0047 - val_mse: 4.6775e-06
Wrote model to dpos_cnn_weights_112.hdf
Epoch 114/114
1552/1552 [==============================] - 912s - loss: 0.0059 - mse: 5.8864e-06 - val_loss: 0.0047 - val_mse: 4.6830e-06
Wrote model to dpos_cnn_weights_113.hdf
Epoch 115/115
1552/1552 [==============================] - 911s - loss: 0.0058 - mse: 5.8427e-06 - val_loss: 0.0047 - val_mse: 4.6872e-06
Wrote model to dpos_cnn_weights_114.hdf
Epoch 116/116
1552/1552 [==============================] - 912s - loss: 0.0059 - mse: 5.8629e-06 - val_loss: 0.0047 - val_mse: 4.7229e-06
Wrote model to dpos_cnn_weights_115.hdf
Epoch 117/117
1552/1552 [==============================] - 911s - loss: 0.0059 - mse: 5.8630e-06 - val_loss: 0.0047 - val_mse: 4.6700e-06
Wrote model to dpos_cnn_weights_116.hdf
Epoch 118/118
1552/1552 [==============================] - 912s - loss: 0.0058 - mse: 5.8372e-06 - val_loss: 0.0047 - val_mse: 4.6914e-06
Wrote model to dpos_cnn_weights_117.hdf
Epoch 119/119
1552/1552 [==============================] - 911s - loss: 0.0059 - mse: 5.8562e-06 - val_loss: 0.0047 - val_mse: 4.7026e-06
Wrote model to dpos_cnn_weights_118.hdf
Epoch 120/120
1552/1552 [==============================] - 912s - loss: 0.0058 - mse: 5.7910e-06 - val_loss: 0.0048 - val_mse: 4.7848e-06
Wrote model to dpos_cnn_weights_119.hdf
Epoch 121/121
1552/1552 [==============================] - 912s - loss: 0.0058 - mse: 5.8265e-06 - val_loss: 0.0047 - val_mse: 4.6684e-06
Wrote model to dpos_cnn_weights_120.hdf
Epoch 122/122
1552/1552 [==============================] - 911s - loss: 0.0058 - mse: 5.8467e-06 - val_loss: 0.0046 - val_mse: 4.6372e-06
Wrote model to dpos_cnn_weights_121.hdf
Epoch 123/123
1552/1552 [==============================] - 912s - loss: 0.0058 - mse: 5.8147e-06 - val_loss: 0.0047 - val_mse: 4.6519e-06
Wrote model to dpos_cnn_weights_122.hdf
Epoch 124/124
1552/1552 [==============================] - 911s - loss: 0.0058 - mse: 5.7977e-06 - val_loss: 0.0047 - val_mse: 4.6562e-06
Wrote model to dpos_cnn_weights_123.hdf
Epoch 125/125
1552/1552 [==============================] - 911s - loss: 0.0059 - mse: 5.8553e-06 - val_loss: 0.0047 - val_mse: 4.6649e-06
Wrote model to dpos_cnn_weights_124.hdf
Epoch 126/126
1552/1552 [==============================] - 912s - loss: 0.0058 - mse: 5.8029e-06 - val_loss: 0.0047 - val_mse: 4.6565e-06
Wrote model to dpos_cnn_weights_125.hdf
Epoch 127/127
1552/1552 [==============================] - 912s - loss: 0.0058 - mse: 5.8255e-06 - val_loss: 0.0047 - val_mse: 4.6567e-06
Wrote model to dpos_cnn_weights_126.hdf
Epoch 128/128
1552/1552 [==============================] - 911s - loss: 0.0058 - mse: 5.7929e-06 - val_loss: 0.0047 - val_mse: 4.6703e-06
Wrote model to dpos_cnn_weights_127.hdf
Epoch 129/129
1552/1552 [==============================] - 912s - loss: 0.0058 - mse: 5.8093e-06 - val_loss: 0.0047 - val_mse: 4.6814e-06
Wrote model to dpos_cnn_weights_128.hdf
Epoch 130/130
1552/1552 [==============================] - 911s - loss: 0.0058 - mse: 5.8092e-06 - val_loss: 0.0047 - val_mse: 4.6882e-06
Wrote model to dpos_cnn_weights_129.hdf
Epoch 131/131
1552/1552 [==============================] - 912s - loss: 0.0058 - mse: 5.7747e-06 - val_loss: 0.0048 - val_mse: 4.7540e-06
Wrote model to dpos_cnn_weights_130.hdf
Epoch 132/132
1552/1552 [==============================] - 911s - loss: 0.0058 - mse: 5.7658e-06 - val_loss: 0.0046 - val_mse: 4.5984e-06
Wrote model to dpos_cnn_weights_131.hdf
Epoch 133/133
1552/1552 [==============================] - 911s - loss: 0.0058 - mse: 5.7699e-06 - val_loss: 0.0047 - val_mse: 4.6858e-06
Wrote model to dpos_cnn_weights_132.hdf
Epoch 134/134
1552/1552 [==============================] - 911s - loss: 0.0058 - mse: 5.7873e-06 - val_loss: 0.0047 - val_mse: 4.6683e-06
Wrote model to dpos_cnn_weights_133.hdf
Epoch 135/135
1552/1552 [==============================] - 911s - loss: 0.0058 - mse: 5.7656e-06 - val_loss: 0.0047 - val_mse: 4.6581e-06
Wrote model to dpos_cnn_weights_134.hdf
Epoch 136/136
1552/1552 [==============================] - 911s - loss: 0.0058 - mse: 5.7704e-06 - val_loss: 0.0047 - val_mse: 4.6638e-06
Wrote model to dpos_cnn_weights_135.hdf
Epoch 137/137
1552/1552 [==============================] - 912s - loss: 0.0058 - mse: 5.7564e-06 - val_loss: 0.0047 - val_mse: 4.6526e-06
Wrote model to dpos_cnn_weights_136.hdf
Epoch 138/138
1552/1552 [==============================] - 912s - loss: 0.0058 - mse: 5.7814e-06 - val_loss: 0.0046 - val_mse: 4.6384e-06
Wrote model to dpos_cnn_weights_137.hdf
Epoch 139/139
1552/1552 [==============================] - 911s - loss: 0.0058 - mse: 5.7606e-06 - val_loss: 0.0046 - val_mse: 4.6281e-06
Wrote model to dpos_cnn_weights_138.hdf
Epoch 140/140
1552/1552 [==============================] - 912s - loss: 0.0058 - mse: 5.7584e-06 - val_loss: 0.0046 - val_mse: 4.6252e-06
Wrote model to dpos_cnn_weights_139.hdf
Epoch 141/141
1552/1552 [==============================] - 912s - loss: 0.0058 - mse: 5.7659e-06 - val_loss: 0.0046 - val_mse: 4.6442e-06
Wrote model to dpos_cnn_weights_140.hdf
Epoch 142/142
1552/1552 [==============================] - 912s - loss: 0.0058 - mse: 5.7613e-06 - val_loss: 0.0046 - val_mse: 4.6118e-06
Wrote model to dpos_cnn_weights_141.hdf
Epoch 143/143
1552/1552 [==============================] - 912s - loss: 0.0057 - mse: 5.7317e-06 - val_loss: 0.0046 - val_mse: 4.6265e-06
Wrote model to dpos_cnn_weights_142.hdf
Epoch 144/144
1552/1552 [==============================] - 911s - loss: 0.0057 - mse: 5.7125e-06 - val_loss: 0.0046 - val_mse: 4.5978e-06
Wrote model to dpos_cnn_weights_143.hdf
Epoch 145/145
1552/1552 [==============================] - 911s - loss: 0.0058 - mse: 5.7526e-06 - val_loss: 0.0047 - val_mse: 4.6912e-06
Wrote model to dpos_cnn_weights_144.hdf
Epoch 146/146
1552/1552 [==============================] - 911s - loss: 0.0057 - mse: 5.7302e-06 - val_loss: 0.0046 - val_mse: 4.6007e-06
Wrote model to dpos_cnn_weights_145.hdf
Epoch 147/147
1552/1552 [==============================] - 911s - loss: 0.0057 - mse: 5.7330e-06 - val_loss: 0.0046 - val_mse: 4.5772e-06
Wrote model to dpos_cnn_weights_146.hdf
Epoch 148/148
1552/1552 [==============================] - 912s - loss: 0.0057 - mse: 5.7239e-06 - val_loss: 0.0046 - val_mse: 4.5791e-06
Wrote model to dpos_cnn_weights_147.hdf
Epoch 149/149
1552/1552 [==============================] - 912s - loss: 0.0057 - mse: 5.6937e-06 - val_loss: 0.0046 - val_mse: 4.6131e-06
Wrote model to dpos_cnn_weights_148.hdf
Epoch 150/150
1552/1552 [==============================] - 911s - loss: 0.0057 - mse: 5.7134e-06 - val_loss: 0.0046 - val_mse: 4.6228e-06
Wrote model to dpos_cnn_weights_149.hdf
Epoch 151/151
1552/1552 [==============================] - 912s - loss: 0.0057 - mse: 5.7061e-06 - val_loss: 0.0046 - val_mse: 4.6063e-06
Wrote model to dpos_cnn_weights_150.hdf
Epoch 152/152
1552/1552 [==============================] - 911s - loss: 0.0057 - mse: 5.7250e-06 - val_loss: 0.0046 - val_mse: 4.5581e-06
Wrote model to dpos_cnn_weights_151.hdf
Epoch 153/153
1552/1552 [==============================] - 912s - loss: 0.0057 - mse: 5.7125e-06 - val_loss: 0.0046 - val_mse: 4.6032e-06
Wrote model to dpos_cnn_weights_152.hdf
Epoch 154/154
1552/1552 [==============================] - 911s - loss: 0.0057 - mse: 5.6983e-06 - val_loss: 0.0046 - val_mse: 4.6096e-06
Wrote model to dpos_cnn_weights_153.hdf
Epoch 155/155
1552/1552 [==============================] - 912s - loss: 0.0057 - mse: 5.7060e-06 - val_loss: 0.0046 - val_mse: 4.6202e-06
Wrote model to dpos_cnn_weights_154.hdf
Epoch 156/156
1552/1552 [==============================] - 912s - loss: 0.0057 - mse: 5.6507e-06 - val_loss: 0.0046 - val_mse: 4.6123e-06
Wrote model to dpos_cnn_weights_155.hdf
Epoch 157/157
1552/1552 [==============================] - 912s - loss: 0.0057 - mse: 5.6981e-06 - val_loss: 0.0045 - val_mse: 4.5355e-06
Wrote model to dpos_cnn_weights_156.hdf
Epoch 158/158
1552/1552 [==============================] - 912s - loss: 0.0057 - mse: 5.7144e-06 - val_loss: 0.0046 - val_mse: 4.6318e-06
Wrote model to dpos_cnn_weights_157.hdf
Epoch 159/159
1552/1552 [==============================] - 911s - loss: 0.0057 - mse: 5.6677e-06 - val_loss: 0.0046 - val_mse: 4.5699e-06
Wrote model to dpos_cnn_weights_158.hdf
Epoch 160/160
1552/1552 [==============================] - 911s - loss: 0.0057 - mse: 5.6745e-06 - val_loss: 0.0047 - val_mse: 4.6601e-06
Wrote model to dpos_cnn_weights_159.hdf
Epoch 161/161
1552/1552 [==============================] - 912s - loss: 0.0057 - mse: 5.6656e-06 - val_loss: 0.0045 - val_mse: 4.5321e-06
Wrote model to dpos_cnn_weights_160.hdf
Epoch 162/162
1552/1552 [==============================] - 912s - loss: 0.0056 - mse: 5.6493e-06 - val_loss: 0.0046 - val_mse: 4.6343e-06
Wrote model to dpos_cnn_weights_161.hdf
Epoch 163/163
1552/1552 [==============================] - 912s - loss: 0.0057 - mse: 5.6506e-06 - val_loss: 0.0046 - val_mse: 4.5587e-06
Wrote model to dpos_cnn_weights_162.hdf
Epoch 164/164
1552/1552 [==============================] - 912s - loss: 0.0057 - mse: 5.6522e-06 - val_loss: 0.0046 - val_mse: 4.6394e-06
Wrote model to dpos_cnn_weights_163.hdf
Epoch 165/165
1552/1552 [==============================] - 911s - loss: 0.0057 - mse: 5.6712e-06 - val_loss: 0.0046 - val_mse: 4.5754e-06
Wrote model to dpos_cnn_weights_164.hdf
Epoch 166/166
1552/1552 [==============================] - 911s - loss: 0.0057 - mse: 5.6634e-06 - val_loss: 0.0046 - val_mse: 4.5818e-06
Wrote model to dpos_cnn_weights_165.hdf
Epoch 167/167
1552/1552 [==============================] - 912s - loss: 0.0057 - mse: 5.6636e-06 - val_loss: 0.0046 - val_mse: 4.6269e-06
Wrote model to dpos_cnn_weights_166.hdf
Epoch 168/168
1552/1552 [==============================] - 911s - loss: 0.0056 - mse: 5.6434e-06 - val_loss: 0.0046 - val_mse: 4.6362e-06
Wrote model to dpos_cnn_weights_167.hdf
Epoch 169/169
1552/1552 [==============================] - 911s - loss: 0.0057 - mse: 5.6607e-06 - val_loss: 0.0045 - val_mse: 4.4877e-06
Wrote model to dpos_cnn_weights_168.hdf
Epoch 170/170
1552/1552 [==============================] - 911s - loss: 0.0056 - mse: 5.6389e-06 - val_loss: 0.0046 - val_mse: 4.5893e-06
Wrote model to dpos_cnn_weights_169.hdf
Epoch 171/171
1552/1552 [==============================] - 911s - loss: 0.0056 - mse: 5.6476e-06 - val_loss: 0.0045 - val_mse: 4.5163e-06
Wrote model to dpos_cnn_weights_170.hdf
Epoch 172/172
1552/1552 [==============================] - 911s - loss: 0.0056 - mse: 5.6203e-06 - val_loss: 0.0046 - val_mse: 4.5697e-06
Wrote model to dpos_cnn_weights_171.hdf
Epoch 173/173
1552/1552 [==============================] - 911s - loss: 0.0056 - mse: 5.6290e-06 - val_loss: 0.0046 - val_mse: 4.5701e-06
Wrote model to dpos_cnn_weights_172.hdf
Epoch 174/174
1552/1552 [==============================] - 912s - loss: 0.0057 - mse: 5.6774e-06 - val_loss: 0.0045 - val_mse: 4.5396e-06
Wrote model to dpos_cnn_weights_173.hdf
Epoch 175/175
1552/1552 [==============================] - 911s - loss: 0.0056 - mse: 5.6250e-06 - val_loss: 0.0046 - val_mse: 4.5963e-06
Wrote model to dpos_cnn_weights_174.hdf
Epoch 176/176
1552/1552 [==============================] - 912s - loss: 0.0056 - mse: 5.6287e-06 - val_loss: 0.0046 - val_mse: 4.5662e-06
Wrote model to dpos_cnn_weights_175.hdf
Epoch 177/177
1552/1552 [==============================] - 911s - loss: 0.0056 - mse: 5.6005e-06 - val_loss: 0.0046 - val_mse: 4.5506e-06
Wrote model to dpos_cnn_weights_176.hdf
Epoch 178/178
1552/1552 [==============================] - 912s - loss: 0.0056 - mse: 5.6201e-06 - val_loss: 0.0045 - val_mse: 4.5152e-06
Wrote model to dpos_cnn_weights_177.hdf
Epoch 179/179
1552/1552 [==============================] - 912s - loss: 0.0056 - mse: 5.6343e-06 - val_loss: 0.0046 - val_mse: 4.5606e-06
Wrote model to dpos_cnn_weights_178.hdf
Epoch 180/180
1552/1552 [==============================] - 912s - loss: 0.0056 - mse: 5.6238e-06 - val_loss: 0.0046 - val_mse: 4.5946e-06
Wrote model to dpos_cnn_weights_179.hdf
Epoch 181/181
1552/1552 [==============================] - 912s - loss: 0.0056 - mse: 5.6025e-06 - val_loss: 0.0046 - val_mse: 4.6250e-06
Wrote model to dpos_cnn_weights_180.hdf
Epoch 182/182
1552/1552 [==============================] - 912s - loss: 0.0056 - mse: 5.6078e-06 - val_loss: 0.0045 - val_mse: 4.4944e-06
Wrote model to dpos_cnn_weights_181.hdf
Epoch 183/183
1552/1552 [==============================] - 911s - loss: 0.0056 - mse: 5.5932e-06 - val_loss: 0.0045 - val_mse: 4.4737e-06
Wrote model to dpos_cnn_weights_182.hdf
Epoch 184/184
1552/1552 [==============================] - 911s - loss: 0.0056 - mse: 5.6381e-06 - val_loss: 0.0046 - val_mse: 4.5575e-06
Wrote model to dpos_cnn_weights_183.hdf
Epoch 185/185
1552/1552 [==============================] - 911s - loss: 0.0056 - mse: 5.6194e-06 - val_loss: 0.0046 - val_mse: 4.5806e-06
Wrote model to dpos_cnn_weights_184.hdf
Epoch 186/186
1552/1552 [==============================] - 912s - loss: 0.0056 - mse: 5.5826e-06 - val_loss: 0.0045 - val_mse: 4.4845e-06
Wrote model to dpos_cnn_weights_185.hdf
Epoch 187/187
1552/1552 [==============================] - 911s - loss: 0.0056 - mse: 5.6046e-06 - val_loss: 0.0045 - val_mse: 4.5105e-06
Wrote model to dpos_cnn_weights_186.hdf
Epoch 188/188
1552/1552 [==============================] - 911s - loss: 0.0056 - mse: 5.6204e-06 - val_loss: 0.0045 - val_mse: 4.5421e-06
Wrote model to dpos_cnn_weights_187.hdf
Epoch 189/189
1552/1552 [==============================] - 911s - loss: 0.0056 - mse: 5.6168e-06 - val_loss: 0.0045 - val_mse: 4.5193e-06
Wrote model to dpos_cnn_weights_188.hdf
Epoch 190/190
1552/1552 [==============================] - 912s - loss: 0.0056 - mse: 5.5897e-06 - val_loss: 0.0045 - val_mse: 4.5245e-06
Wrote model to dpos_cnn_weights_189.hdf
Epoch 191/191
1552/1552 [==============================] - 911s - loss: 0.0056 - mse: 5.6122e-06 - val_loss: 0.0046 - val_mse: 4.5538e-06
Wrote model to dpos_cnn_weights_190.hdf
Epoch 192/192
1552/1552 [==============================] - 911s - loss: 0.0056 - mse: 5.6003e-06 - val_loss: 0.0045 - val_mse: 4.5330e-06
Wrote model to dpos_cnn_weights_191.hdf
Epoch 193/193
1552/1552 [==============================] - 912s - loss: 0.0056 - mse: 5.5520e-06 - val_loss: 0.0046 - val_mse: 4.5836e-06
Wrote model to dpos_cnn_weights_192.hdf
Epoch 194/194
1552/1552 [==============================] - 911s - loss: 0.0056 - mse: 5.5679e-06 - val_loss: 0.0045 - val_mse: 4.5019e-06
Wrote model to dpos_cnn_weights_193.hdf
Epoch 195/195
1552/1552 [==============================] - 912s - loss: 0.0056 - mse: 5.6077e-06 - val_loss: 0.0046 - val_mse: 4.5770e-06
Wrote model to dpos_cnn_weights_194.hdf
Epoch 196/196
1552/1552 [==============================] - 912s - loss: 0.0056 - mse: 5.5861e-06 - val_loss: 0.0046 - val_mse: 4.5689e-06
Wrote model to dpos_cnn_weights_195.hdf
Epoch 197/197
1552/1552 [==============================] - 911s - loss: 0.0056 - mse: 5.5638e-06 - val_loss: 0.0046 - val_mse: 4.5567e-06
Wrote model to dpos_cnn_weights_196.hdf
Epoch 198/198
1552/1552 [==============================] - 911s - loss: 0.0056 - mse: 5.5748e-06 - val_loss: 0.0045 - val_mse: 4.5461e-06
Wrote model to dpos_cnn_weights_197.hdf
Epoch 199/199
1552/1552 [==============================] - 912s - loss: 0.0055 - mse: 5.5349e-06 - val_loss: 0.0046 - val_mse: 4.6121e-06
Wrote model to dpos_cnn_weights_198.hdf
Epoch 200/200
1552/1552 [==============================] - 911s - loss: 0.0056 - mse: 5.5850e-06 - val_loss: 0.0045 - val_mse: 4.4795e-06
Wrote model to dpos_cnn_weights_199.hdf
Epoch 201/201
1552/1552 [==============================] - 912s - loss: 0.0056 - mse: 5.5694e-06 - val_loss: 0.0045 - val_mse: 4.4776e-06
Wrote model to dpos_cnn_weights_200.hdf
Epoch 202/202
1552/1552 [==============================] - 911s - loss: 0.0056 - mse: 5.5593e-06 - val_loss: 0.0045 - val_mse: 4.5208e-06
Wrote model to dpos_cnn_weights_201.hdf
Epoch 203/203
1552/1552 [==============================] - 911s - loss: 0.0055 - mse: 5.5366e-06 - val_loss: 0.0045 - val_mse: 4.4746e-06
Wrote model to dpos_cnn_weights_202.hdf
Epoch 204/204
1552/1552 [==============================] - 912s - loss: 0.0056 - mse: 5.5614e-06 - val_loss: 0.0045 - val_mse: 4.5464e-06
Wrote model to dpos_cnn_weights_203.hdf
Epoch 205/205
1552/1552 [==============================] - 912s - loss: 0.0056 - mse: 5.5731e-06 - val_loss: 0.0046 - val_mse: 4.6170e-06
Wrote model to dpos_cnn_weights_204.hdf
Epoch 206/206
1552/1552 [==============================] - 911s - loss: 0.0056 - mse: 5.5765e-06 - val_loss: 0.0045 - val_mse: 4.4958e-06
Wrote model to dpos_cnn_weights_205.hdf
Epoch 207/207
1552/1552 [==============================] - 912s - loss: 0.0056 - mse: 5.5636e-06 - val_loss: 0.0045 - val_mse: 4.5371e-06
Wrote model to dpos_cnn_weights_206.hdf
Epoch 208/208
1552/1552 [==============================] - 911s - loss: 0.0056 - mse: 5.5801e-06 - val_loss: 0.0045 - val_mse: 4.5318e-06
Wrote model to dpos_cnn_weights_207.hdf
Epoch 209/209
1552/1552 [==============================] - 912s - loss: 0.0055 - mse: 5.5446e-06 - val_loss: 0.0047 - val_mse: 4.6633e-06
Wrote model to dpos_cnn_weights_208.hdf
Epoch 210/210
1552/1552 [==============================] - 912s - loss: 0.0055 - mse: 5.5365e-06 - val_loss: 0.0046 - val_mse: 4.6196e-06
Wrote model to dpos_cnn_weights_209.hdf
Epoch 211/211
1552/1552 [==============================] - 912s - loss: 0.0055 - mse: 5.5182e-06 - val_loss: 0.0046 - val_mse: 4.5652e-06
Wrote model to dpos_cnn_weights_210.hdf
Epoch 212/212
1552/1552 [==============================] - 911s - loss: 0.0055 - mse: 5.5305e-06 - val_loss: 0.0045 - val_mse: 4.4926e-06
Wrote model to dpos_cnn_weights_211.hdf
Epoch 213/213
1552/1552 [==============================] - 912s - loss: 0.0055 - mse: 5.5265e-06 - val_loss: 0.0045 - val_mse: 4.5402e-06
Wrote model to dpos_cnn_weights_212.hdf
Epoch 214/214
1552/1552 [==============================] - 912s - loss: 0.0055 - mse: 5.5392e-06 - val_loss: 0.0046 - val_mse: 4.5528e-06
Wrote model to dpos_cnn_weights_213.hdf
Epoch 215/215
1552/1552 [==============================] - 912s - loss: 0.0055 - mse: 5.5453e-06 - val_loss: 0.0046 - val_mse: 4.5727e-06
Wrote model to dpos_cnn_weights_214.hdf
Epoch 216/216
1552/1552 [==============================] - 912s - loss: 0.0055 - mse: 5.5118e-06 - val_loss: 0.0045 - val_mse: 4.5404e-06
Wrote model to dpos_cnn_weights_215.hdf
Epoch 217/217
1552/1552 [==============================] - 912s - loss: 0.0055 - mse: 5.5319e-06 - val_loss: 0.0045 - val_mse: 4.4879e-06
Wrote model to dpos_cnn_weights_216.hdf
Epoch 218/218
1552/1552 [==============================] - 912s - loss: 0.0055 - mse: 5.5335e-06 - val_loss: 0.0045 - val_mse: 4.4663e-06
Wrote model to dpos_cnn_weights_217.hdf
Epoch 219/219
1552/1552 [==============================] - 911s - loss: 0.0055 - mse: 5.5024e-06 - val_loss: 0.0045 - val_mse: 4.4865e-06
Wrote model to dpos_cnn_weights_218.hdf
Epoch 220/220
1552/1552 [==============================] - 912s - loss: 0.0055 - mse: 5.5106e-06 - val_loss: 0.0045 - val_mse: 4.4929e-06
Wrote model to dpos_cnn_weights_219.hdf
Epoch 221/221
1552/1552 [==============================] - 912s - loss: 0.0055 - mse: 5.5370e-06 - val_loss: 0.0044 - val_mse: 4.4107e-06
Wrote model to dpos_cnn_weights_220.hdf
Epoch 222/222
1552/1552 [==============================] - 912s - loss: 0.0055 - mse: 5.5015e-06 - val_loss: 0.0045 - val_mse: 4.4691e-06
Wrote model to dpos_cnn_weights_221.hdf
Epoch 223/223
1552/1552 [==============================] - 912s - loss: 0.0055 - mse: 5.5163e-06 - val_loss: 0.0045 - val_mse: 4.5456e-06
Wrote model to dpos_cnn_weights_222.hdf
Epoch 224/224
1552/1552 [==============================] - 912s - loss: 0.0055 - mse: 5.5038e-06 - val_loss: 0.0045 - val_mse: 4.4774e-06
Wrote model to dpos_cnn_weights_223.hdf
Epoch 225/225
1552/1552 [==============================] - 911s - loss: 0.0055 - mse: 5.4919e-06 - val_loss: 0.0045 - val_mse: 4.5070e-06
Wrote model to dpos_cnn_weights_224.hdf
Epoch 226/226
1552/1552 [==============================] - 912s - loss: 0.0055 - mse: 5.4903e-06 - val_loss: 0.0045 - val_mse: 4.4916e-06
Wrote model to dpos_cnn_weights_225.hdf
Epoch 227/227
1552/1552 [==============================] - 911s - loss: 0.0055 - mse: 5.4788e-06 - val_loss: 0.0045 - val_mse: 4.5275e-06
Wrote model to dpos_cnn_weights_226.hdf
Epoch 228/228
1552/1552 [==============================] - 912s - loss: 0.0055 - mse: 5.5400e-06 - val_loss: 0.0045 - val_mse: 4.5267e-06
Wrote model to dpos_cnn_weights_227.hdf
Epoch 229/229
1552/1552 [==============================] - 912s - loss: 0.0055 - mse: 5.5248e-06 - val_loss: 0.0045 - val_mse: 4.5272e-06
Wrote model to dpos_cnn_weights_228.hdf
Epoch 230/230
1552/1552 [==============================] - 911s - loss: 0.0055 - mse: 5.4951e-06 - val_loss: 0.0045 - val_mse: 4.5016e-06
Wrote model to dpos_cnn_weights_229.hdf
Epoch 231/231
1552/1552 [==============================] - 911s - loss: 0.0055 - mse: 5.5102e-06 - val_loss: 0.0045 - val_mse: 4.5295e-06
Wrote model to dpos_cnn_weights_230.hdf
Epoch 232/232
1552/1552 [==============================] - 912s - loss: 0.0055 - mse: 5.4928e-06 - val_loss: 0.0044 - val_mse: 4.4336e-06
Wrote model to dpos_cnn_weights_231.hdf
Epoch 233/233
1552/1552 [==============================] - 912s - loss: 0.0055 - mse: 5.5215e-06 - val_loss: 0.0045 - val_mse: 4.4810e-06
Wrote model to dpos_cnn_weights_232.hdf
Epoch 234/234
1552/1552 [==============================] - 912s - loss: 0.0055 - mse: 5.4667e-06 - val_loss: 0.0045 - val_mse: 4.4994e-06
Wrote model to dpos_cnn_weights_233.hdf
Epoch 235/235
1552/1552 [==============================] - 912s - loss: 0.0055 - mse: 5.4876e-06 - val_loss: 0.0044 - val_mse: 4.4456e-06
Wrote model to dpos_cnn_weights_234.hdf
Epoch 236/236
1552/1552 [==============================] - 912s - loss: 0.0055 - mse: 5.4826e-06 - val_loss: 0.0045 - val_mse: 4.4690e-06
Wrote model to dpos_cnn_weights_235.hdf
Epoch 237/237
1552/1552 [==============================] - 912s - loss: 0.0055 - mse: 5.4947e-06 - val_loss: 0.0045 - val_mse: 4.4659e-06
Wrote model to dpos_cnn_weights_236.hdf
Epoch 238/238
1552/1552 [==============================] - 912s - loss: 0.0055 - mse: 5.4709e-06 - val_loss: 0.0044 - val_mse: 4.4351e-06
Wrote model to dpos_cnn_weights_237.hdf
Epoch 239/239
1552/1552 [==============================] - 911s - loss: 0.0055 - mse: 5.4640e-06 - val_loss: 0.0045 - val_mse: 4.4527e-06
Wrote model to dpos_cnn_weights_238.hdf
Epoch 240/240
1552/1552 [==============================] - 912s - loss: 0.0055 - mse: 5.4881e-06 - val_loss: 0.0045 - val_mse: 4.5372e-06
Wrote model to dpos_cnn_weights_239.hdf
Epoch 241/241
1552/1552 [==============================] - 912s - loss: 0.0055 - mse: 5.5124e-06 - val_loss: 0.0046 - val_mse: 4.5668e-06
Wrote model to dpos_cnn_weights_240.hdf
Epoch 242/242
1552/1552 [==============================] - 912s - loss: 0.0055 - mse: 5.4828e-06 - val_loss: 0.0044 - val_mse: 4.4196e-06
Wrote model to dpos_cnn_weights_241.hdf
Epoch 243/243
1552/1552 [==============================] - 912s - loss: 0.0055 - mse: 5.4830e-06 - val_loss: 0.0045 - val_mse: 4.4980e-06
Wrote model to dpos_cnn_weights_242.hdf
Epoch 244/244
1552/1552 [==============================] - 911s - loss: 0.0055 - mse: 5.4735e-06 - val_loss: 0.0045 - val_mse: 4.5447e-06
Wrote model to dpos_cnn_weights_243.hdf
Epoch 245/245
1552/1552 [==============================] - 911s - loss: 0.0055 - mse: 5.5131e-06 - val_loss: 0.0045 - val_mse: 4.4760e-06
Wrote model to dpos_cnn_weights_244.hdf
Epoch 246/246
1552/1552 [==============================] - 912s - loss: 0.0055 - mse: 5.4802e-06 - val_loss: 0.0045 - val_mse: 4.5462e-06
Wrote model to dpos_cnn_weights_245.hdf
Epoch 247/247
1552/1552 [==============================] - 912s - loss: 0.0055 - mse: 5.4633e-06 - val_loss: 0.0045 - val_mse: 4.5009e-06
Wrote model to dpos_cnn_weights_246.hdf
Epoch 248/248
1552/1552 [==============================] - 912s - loss: 0.0055 - mse: 5.4628e-06 - val_loss: 0.0045 - val_mse: 4.4813e-06
Wrote model to dpos_cnn_weights_247.hdf
Epoch 249/249
1552/1552 [==============================] - 911s - loss: 0.0055 - mse: 5.4542e-06 - val_loss: 0.0045 - val_mse: 4.5480e-06
Wrote model to dpos_cnn_weights_248.hdf
Epoch 250/250
1552/1552 [==============================] - 911s - loss: 0.0055 - mse: 5.4651e-06 - val_loss: 0.0044 - val_mse: 4.4435e-06
Wrote model to dpos_cnn_weights_249.hdf
Epoch 251/251
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.4403e-06 - val_loss: 0.0045 - val_mse: 4.4611e-06
Wrote model to dpos_cnn_weights_250.hdf
Epoch 252/252
1552/1552 [==============================] - 911s - loss: 0.0055 - mse: 5.4800e-06 - val_loss: 0.0046 - val_mse: 4.5605e-06
Wrote model to dpos_cnn_weights_251.hdf
Epoch 253/253
1552/1552 [==============================] - 912s - loss: 0.0055 - mse: 5.4680e-06 - val_loss: 0.0045 - val_mse: 4.5049e-06
Wrote model to dpos_cnn_weights_252.hdf
Epoch 254/254
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.4390e-06 - val_loss: 0.0045 - val_mse: 4.4888e-06
Wrote model to dpos_cnn_weights_253.hdf
Epoch 255/255
1552/1552 [==============================] - 911s - loss: 0.0054 - mse: 5.4447e-06 - val_loss: 0.0045 - val_mse: 4.4760e-06
Wrote model to dpos_cnn_weights_254.hdf
Epoch 256/256
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.4400e-06 - val_loss: 0.0045 - val_mse: 4.4611e-06
Wrote model to dpos_cnn_weights_255.hdf
Epoch 257/257
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.4413e-06 - val_loss: 0.0045 - val_mse: 4.4692e-06
Wrote model to dpos_cnn_weights_256.hdf
Epoch 258/258
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.4393e-06 - val_loss: 0.0045 - val_mse: 4.5056e-06
Wrote model to dpos_cnn_weights_257.hdf
Epoch 259/259
1552/1552 [==============================] - 912s - loss: 0.0055 - mse: 5.4568e-06 - val_loss: 0.0045 - val_mse: 4.5011e-06
Wrote model to dpos_cnn_weights_258.hdf
Epoch 260/260
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.4005e-06 - val_loss: 0.0045 - val_mse: 4.4903e-06
Wrote model to dpos_cnn_weights_259.hdf
Epoch 261/261
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.4296e-06 - val_loss: 0.0045 - val_mse: 4.5002e-06
Wrote model to dpos_cnn_weights_260.hdf
Epoch 262/262
1552/1552 [==============================] - 912s - loss: 0.0055 - mse: 5.4551e-06 - val_loss: 0.0045 - val_mse: 4.5302e-06
Wrote model to dpos_cnn_weights_261.hdf
Epoch 263/263
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.4224e-06 - val_loss: 0.0044 - val_mse: 4.4125e-06
Wrote model to dpos_cnn_weights_262.hdf
Epoch 264/264
1552/1552 [==============================] - 911s - loss: 0.0055 - mse: 5.4689e-06 - val_loss: 0.0046 - val_mse: 4.5582e-06
Wrote model to dpos_cnn_weights_263.hdf
Epoch 265/265
1552/1552 [==============================] - 911s - loss: 0.0054 - mse: 5.4474e-06 - val_loss: 0.0046 - val_mse: 4.5582e-06
Wrote model to dpos_cnn_weights_264.hdf
Epoch 266/266
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.4381e-06 - val_loss: 0.0045 - val_mse: 4.4572e-06
Wrote model to dpos_cnn_weights_265.hdf
Epoch 267/267
1552/1552 [==============================] - 911s - loss: 0.0054 - mse: 5.4467e-06 - val_loss: 0.0044 - val_mse: 4.4492e-06
Wrote model to dpos_cnn_weights_266.hdf
Epoch 268/268
1552/1552 [==============================] - 911s - loss: 0.0055 - mse: 5.4705e-06 - val_loss: 0.0045 - val_mse: 4.4558e-06
Wrote model to dpos_cnn_weights_267.hdf
Epoch 269/269
1552/1552 [==============================] - 912s - loss: 0.0055 - mse: 5.4717e-06 - val_loss: 0.0045 - val_mse: 4.5113e-06
Wrote model to dpos_cnn_weights_268.hdf
Epoch 270/270
1552/1552 [==============================] - 911s - loss: 0.0054 - mse: 5.4363e-06 - val_loss: 0.0044 - val_mse: 4.4131e-06
Wrote model to dpos_cnn_weights_269.hdf
Epoch 271/271
1552/1552 [==============================] - 911s - loss: 0.0054 - mse: 5.4164e-06 - val_loss: 0.0043 - val_mse: 4.3461e-06
Wrote model to dpos_cnn_weights_270.hdf
Epoch 272/272
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.4199e-06 - val_loss: 0.0045 - val_mse: 4.5024e-06
Wrote model to dpos_cnn_weights_271.hdf
Epoch 273/273
1552/1552 [==============================] - 911s - loss: 0.0054 - mse: 5.4249e-06 - val_loss: 0.0044 - val_mse: 4.4232e-06
Wrote model to dpos_cnn_weights_272.hdf
Epoch 274/274
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.4324e-06 - val_loss: 0.0045 - val_mse: 4.5359e-06
Wrote model to dpos_cnn_weights_273.hdf
Epoch 275/275
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.3904e-06 - val_loss: 0.0045 - val_mse: 4.4779e-06
Wrote model to dpos_cnn_weights_274.hdf
Epoch 276/276
1552/1552 [==============================] - 911s - loss: 0.0054 - mse: 5.4203e-06 - val_loss: 0.0045 - val_mse: 4.4806e-06
Wrote model to dpos_cnn_weights_275.hdf
Epoch 277/277
1552/1552 [==============================] - 911s - loss: 0.0054 - mse: 5.4016e-06 - val_loss: 0.0045 - val_mse: 4.4605e-06
Wrote model to dpos_cnn_weights_276.hdf
Epoch 278/278
1552/1552 [==============================] - 911s - loss: 0.0054 - mse: 5.4049e-06 - val_loss: 0.0045 - val_mse: 4.4980e-06
Wrote model to dpos_cnn_weights_277.hdf
Epoch 279/279
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.4205e-06 - val_loss: 0.0045 - val_mse: 4.4514e-06
Wrote model to dpos_cnn_weights_278.hdf
Epoch 280/280
1552/1552 [==============================] - 911s - loss: 0.0054 - mse: 5.3877e-06 - val_loss: 0.0045 - val_mse: 4.4942e-06
Wrote model to dpos_cnn_weights_279.hdf
Epoch 281/281
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.3839e-06 - val_loss: 0.0045 - val_mse: 4.4754e-06
Wrote model to dpos_cnn_weights_280.hdf
Epoch 282/282
1552/1552 [==============================] - 911s - loss: 0.0054 - mse: 5.3934e-06 - val_loss: 0.0044 - val_mse: 4.4498e-06
Wrote model to dpos_cnn_weights_281.hdf
Epoch 283/283
1552/1552 [==============================] - 911s - loss: 0.0054 - mse: 5.4304e-06 - val_loss: 0.0046 - val_mse: 4.5509e-06
Wrote model to dpos_cnn_weights_282.hdf
Epoch 284/284
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.4163e-06 - val_loss: 0.0044 - val_mse: 4.4020e-06
Wrote model to dpos_cnn_weights_283.hdf
Epoch 285/285
1552/1552 [==============================] - 912s - loss: 0.0055 - mse: 5.4593e-06 - val_loss: 0.0045 - val_mse: 4.5043e-06
Wrote model to dpos_cnn_weights_284.hdf
Epoch 286/286
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.3843e-06 - val_loss: 0.0045 - val_mse: 4.4725e-06
Wrote model to dpos_cnn_weights_285.hdf
Epoch 287/287
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.4053e-06 - val_loss: 0.0044 - val_mse: 4.4198e-06
Wrote model to dpos_cnn_weights_286.hdf
Epoch 288/288
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.3933e-06 - val_loss: 0.0044 - val_mse: 4.3923e-06
Wrote model to dpos_cnn_weights_287.hdf
Epoch 289/289
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.3945e-06 - val_loss: 0.0045 - val_mse: 4.4711e-06
Wrote model to dpos_cnn_weights_288.hdf
Epoch 290/290
1552/1552 [==============================] - 911s - loss: 0.0054 - mse: 5.4077e-06 - val_loss: 0.0044 - val_mse: 4.4465e-06
Wrote model to dpos_cnn_weights_289.hdf
Epoch 291/291
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.4284e-06 - val_loss: 0.0045 - val_mse: 4.4989e-06
Wrote model to dpos_cnn_weights_290.hdf
Epoch 292/292
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.4173e-06 - val_loss: 0.0044 - val_mse: 4.3852e-06
Wrote model to dpos_cnn_weights_291.hdf
Epoch 293/293
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.3599e-06 - val_loss: 0.0045 - val_mse: 4.5149e-06
Wrote model to dpos_cnn_weights_292.hdf
Epoch 294/294
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.4110e-06 - val_loss: 0.0046 - val_mse: 4.5632e-06
Wrote model to dpos_cnn_weights_293.hdf
Epoch 295/295
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.4179e-06 - val_loss: 0.0045 - val_mse: 4.4948e-06
Wrote model to dpos_cnn_weights_294.hdf
Epoch 296/296
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.3962e-06 - val_loss: 0.0044 - val_mse: 4.4405e-06
Wrote model to dpos_cnn_weights_295.hdf
Epoch 297/297
1552/1552 [==============================] - 911s - loss: 0.0054 - mse: 5.3917e-06 - val_loss: 0.0045 - val_mse: 4.4565e-06
Wrote model to dpos_cnn_weights_296.hdf
Epoch 298/298
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.4092e-06 - val_loss: 0.0045 - val_mse: 4.4686e-06
Wrote model to dpos_cnn_weights_297.hdf
Epoch 299/299
1552/1552 [==============================] - 911s - loss: 0.0054 - mse: 5.4041e-06 - val_loss: 0.0044 - val_mse: 4.3980e-06
Wrote model to dpos_cnn_weights_298.hdf
Epoch 300/300
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.3823e-06 - val_loss: 0.0045 - val_mse: 4.4823e-06
Wrote model to dpos_cnn_weights_299.hdf
Epoch 301/301
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.4048e-06 - val_loss: 0.0045 - val_mse: 4.5268e-06
Wrote model to dpos_cnn_weights_300.hdf
Epoch 302/302
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.3918e-06 - val_loss: 0.0044 - val_mse: 4.4461e-06
Wrote model to dpos_cnn_weights_301.hdf
Epoch 303/303
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.4204e-06 - val_loss: 0.0045 - val_mse: 4.4857e-06
Wrote model to dpos_cnn_weights_302.hdf
Epoch 304/304
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.3811e-06 - val_loss: 0.0044 - val_mse: 4.3654e-06
Wrote model to dpos_cnn_weights_303.hdf
Epoch 305/305
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.4015e-06 - val_loss: 0.0044 - val_mse: 4.4031e-06
Wrote model to dpos_cnn_weights_304.hdf
Epoch 306/306
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.4198e-06 - val_loss: 0.0044 - val_mse: 4.4032e-06
Wrote model to dpos_cnn_weights_305.hdf
Epoch 307/307
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.3761e-06 - val_loss: 0.0044 - val_mse: 4.4224e-06
Wrote model to dpos_cnn_weights_306.hdf
Epoch 308/308
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.3658e-06 - val_loss: 0.0044 - val_mse: 4.4105e-06
Wrote model to dpos_cnn_weights_307.hdf
Epoch 309/309
1552/1552 [==============================] - 911s - loss: 0.0054 - mse: 5.4056e-06 - val_loss: 0.0044 - val_mse: 4.3736e-06
Wrote model to dpos_cnn_weights_308.hdf
Epoch 310/310
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.3742e-06 - val_loss: 0.0044 - val_mse: 4.4156e-06
Wrote model to dpos_cnn_weights_309.hdf
Epoch 311/311
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.3770e-06 - val_loss: 0.0045 - val_mse: 4.4694e-06
Wrote model to dpos_cnn_weights_310.hdf
Epoch 312/312
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.4020e-06 - val_loss: 0.0045 - val_mse: 4.4596e-06
Wrote model to dpos_cnn_weights_311.hdf
Epoch 313/313
1552/1552 [==============================] - 912s - loss: 0.0053 - mse: 5.3356e-06 - val_loss: 0.0046 - val_mse: 4.5789e-06
Wrote model to dpos_cnn_weights_312.hdf
Epoch 314/314
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.3978e-06 - val_loss: 0.0044 - val_mse: 4.3626e-06
Wrote model to dpos_cnn_weights_313.hdf
Epoch 315/315
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.3508e-06 - val_loss: 0.0044 - val_mse: 4.4046e-06
Wrote model to dpos_cnn_weights_314.hdf
Epoch 316/316
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.3705e-06 - val_loss: 0.0046 - val_mse: 4.5550e-06
Wrote model to dpos_cnn_weights_315.hdf
Epoch 317/317
1552/1552 [==============================] - 911s - loss: 0.0054 - mse: 5.3642e-06 - val_loss: 0.0044 - val_mse: 4.4139e-06
Wrote model to dpos_cnn_weights_316.hdf
Epoch 318/318
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.3671e-06 - val_loss: 0.0044 - val_mse: 4.4363e-06
Wrote model to dpos_cnn_weights_317.hdf
Epoch 319/319
1552/1552 [==============================] - 912s - loss: 0.0053 - mse: 5.3497e-06 - val_loss: 0.0045 - val_mse: 4.4561e-06
Wrote model to dpos_cnn_weights_318.hdf
Epoch 320/320
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.3845e-06 - val_loss: 0.0044 - val_mse: 4.4080e-06
Wrote model to dpos_cnn_weights_319.hdf
Epoch 321/321
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.3637e-06 - val_loss: 0.0045 - val_mse: 4.4662e-06
Wrote model to dpos_cnn_weights_320.hdf
Epoch 322/322
1552/1552 [==============================] - 911s - loss: 0.0054 - mse: 5.3756e-06 - val_loss: 0.0044 - val_mse: 4.3828e-06
Wrote model to dpos_cnn_weights_321.hdf
Epoch 323/323
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.3843e-06 - val_loss: 0.0044 - val_mse: 4.4424e-06
Wrote model to dpos_cnn_weights_322.hdf
Epoch 324/324
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.3733e-06 - val_loss: 0.0044 - val_mse: 4.3933e-06
Wrote model to dpos_cnn_weights_323.hdf
Epoch 325/325
1552/1552 [==============================] - 911s - loss: 0.0054 - mse: 5.4064e-06 - val_loss: 0.0045 - val_mse: 4.4586e-06
Wrote model to dpos_cnn_weights_324.hdf
Epoch 326/326
1552/1552 [==============================] - 912s - loss: 0.0053 - mse: 5.3412e-06 - val_loss: 0.0044 - val_mse: 4.3866e-06
Wrote model to dpos_cnn_weights_325.hdf
Epoch 327/327
1552/1552 [==============================] - 911s - loss: 0.0054 - mse: 5.3559e-06 - val_loss: 0.0044 - val_mse: 4.3612e-06
Wrote model to dpos_cnn_weights_326.hdf
Epoch 328/328
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.3606e-06 - val_loss: 0.0044 - val_mse: 4.3504e-06
Wrote model to dpos_cnn_weights_327.hdf
Epoch 329/329
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.3720e-06 - val_loss: 0.0044 - val_mse: 4.3726e-06
Wrote model to dpos_cnn_weights_328.hdf
Epoch 330/330
1552/1552 [==============================] - 912s - loss: 0.0053 - mse: 5.3371e-06 - val_loss: 0.0044 - val_mse: 4.3934e-06
Wrote model to dpos_cnn_weights_329.hdf
Epoch 331/331
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.3729e-06 - val_loss: 0.0044 - val_mse: 4.4398e-06
Wrote model to dpos_cnn_weights_330.hdf
Epoch 332/332
1552/1552 [==============================] - 912s - loss: 0.0053 - mse: 5.3402e-06 - val_loss: 0.0045 - val_mse: 4.4899e-06
Wrote model to dpos_cnn_weights_331.hdf
Epoch 333/333
1552/1552 [==============================] - 911s - loss: 0.0054 - mse: 5.3520e-06 - val_loss: 0.0043 - val_mse: 4.3383e-06
Wrote model to dpos_cnn_weights_332.hdf
Epoch 334/334
1552/1552 [==============================] - 911s - loss: 0.0053 - mse: 5.3399e-06 - val_loss: 0.0044 - val_mse: 4.3520e-06
Wrote model to dpos_cnn_weights_333.hdf
Epoch 335/335
1552/1552 [==============================] - 912s - loss: 0.0053 - mse: 5.3395e-06 - val_loss: 0.0044 - val_mse: 4.4065e-06
Wrote model to dpos_cnn_weights_334.hdf
Epoch 336/336
1552/1552 [==============================] - 912s - loss: 0.0053 - mse: 5.3291e-06 - val_loss: 0.0043 - val_mse: 4.3412e-06
Wrote model to dpos_cnn_weights_335.hdf
Epoch 337/337
1552/1552 [==============================] - 911s - loss: 0.0053 - mse: 5.3434e-06 - val_loss: 0.0044 - val_mse: 4.3727e-06
Wrote model to dpos_cnn_weights_336.hdf
Epoch 338/338
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.3971e-06 - val_loss: 0.0045 - val_mse: 4.4836e-06
Wrote model to dpos_cnn_weights_337.hdf
Epoch 339/339
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.3598e-06 - val_loss: 0.0044 - val_mse: 4.3736e-06
Wrote model to dpos_cnn_weights_338.hdf
Epoch 340/340
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.3694e-06 - val_loss: 0.0044 - val_mse: 4.4395e-06
Wrote model to dpos_cnn_weights_339.hdf
Epoch 341/341
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.3835e-06 - val_loss: 0.0044 - val_mse: 4.4019e-06
Wrote model to dpos_cnn_weights_340.hdf
Epoch 342/342
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.3533e-06 - val_loss: 0.0045 - val_mse: 4.4925e-06
Wrote model to dpos_cnn_weights_341.hdf
Epoch 343/343
1552/1552 [==============================] - 913s - loss: 0.0054 - mse: 5.3524e-06 - val_loss: 0.0044 - val_mse: 4.3715e-06
Wrote model to dpos_cnn_weights_342.hdf
Epoch 344/344
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.3646e-06 - val_loss: 0.0044 - val_mse: 4.3531e-06
Wrote model to dpos_cnn_weights_343.hdf
Epoch 345/345
1552/1552 [==============================] - 912s - loss: 0.0053 - mse: 5.3305e-06 - val_loss: 0.0044 - val_mse: 4.4355e-06
Wrote model to dpos_cnn_weights_344.hdf
Epoch 346/346
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.3686e-06 - val_loss: 0.0044 - val_mse: 4.3758e-06
Wrote model to dpos_cnn_weights_345.hdf
Epoch 347/347
1552/1552 [==============================] - 913s - loss: 0.0054 - mse: 5.3535e-06 - val_loss: 0.0045 - val_mse: 4.4763e-06
Wrote model to dpos_cnn_weights_346.hdf
Epoch 348/348
1552/1552 [==============================] - 912s - loss: 0.0053 - mse: 5.3296e-06 - val_loss: 0.0044 - val_mse: 4.4221e-06
Wrote model to dpos_cnn_weights_347.hdf
Epoch 349/349
1552/1552 [==============================] - 911s - loss: 0.0053 - mse: 5.3463e-06 - val_loss: 0.0043 - val_mse: 4.2732e-06
Wrote model to dpos_cnn_weights_348.hdf
Epoch 350/350
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.3510e-06 - val_loss: 0.0045 - val_mse: 4.4541e-06
Wrote model to dpos_cnn_weights_349.hdf
Epoch 351/351
1552/1552 [==============================] - 912s - loss: 0.0053 - mse: 5.3424e-06 - val_loss: 0.0043 - val_mse: 4.3024e-06
Wrote model to dpos_cnn_weights_350.hdf
Epoch 352/352
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.3632e-06 - val_loss: 0.0044 - val_mse: 4.4030e-06
Wrote model to dpos_cnn_weights_351.hdf
Epoch 353/353
1552/1552 [==============================] - 912s - loss: 0.0053 - mse: 5.3296e-06 - val_loss: 0.0044 - val_mse: 4.4364e-06
Wrote model to dpos_cnn_weights_352.hdf
Epoch 354/354
1552/1552 [==============================] - 912s - loss: 0.0053 - mse: 5.3267e-06 - val_loss: 0.0044 - val_mse: 4.4278e-06
Wrote model to dpos_cnn_weights_353.hdf
Epoch 355/355
1552/1552 [==============================] - 912s - loss: 0.0053 - mse: 5.3451e-06 - val_loss: 0.0045 - val_mse: 4.4753e-06
Wrote model to dpos_cnn_weights_354.hdf
Epoch 356/356
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.3601e-06 - val_loss: 0.0044 - val_mse: 4.4420e-06
Wrote model to dpos_cnn_weights_355.hdf
Epoch 357/357
1552/1552 [==============================] - 912s - loss: 0.0053 - mse: 5.3235e-06 - val_loss: 0.0044 - val_mse: 4.4116e-06
Wrote model to dpos_cnn_weights_356.hdf
Epoch 358/358
1552/1552 [==============================] - 912s - loss: 0.0053 - mse: 5.3205e-06 - val_loss: 0.0044 - val_mse: 4.4050e-06
Wrote model to dpos_cnn_weights_357.hdf
Epoch 359/359
1552/1552 [==============================] - 912s - loss: 0.0053 - mse: 5.3280e-06 - val_loss: 0.0044 - val_mse: 4.4405e-06
Wrote model to dpos_cnn_weights_358.hdf
Epoch 360/360
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.3651e-06 - val_loss: 0.0045 - val_mse: 4.4585e-06
Wrote model to dpos_cnn_weights_359.hdf
Epoch 361/361
1552/1552 [==============================] - 911s - loss: 0.0053 - mse: 5.3183e-06 - val_loss: 0.0045 - val_mse: 4.4516e-06
Wrote model to dpos_cnn_weights_360.hdf
Epoch 362/362
1552/1552 [==============================] - 912s - loss: 0.0053 - mse: 5.3153e-06 - val_loss: 0.0044 - val_mse: 4.3957e-06
Wrote model to dpos_cnn_weights_361.hdf
Epoch 363/363
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.3618e-06 - val_loss: 0.0044 - val_mse: 4.4060e-06
Wrote model to dpos_cnn_weights_362.hdf
Epoch 364/364
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.3576e-06 - val_loss: 0.0045 - val_mse: 4.5471e-06
Wrote model to dpos_cnn_weights_363.hdf
Epoch 365/365
1552/1552 [==============================] - 912s - loss: 0.0053 - mse: 5.3363e-06 - val_loss: 0.0044 - val_mse: 4.4137e-06
Wrote model to dpos_cnn_weights_364.hdf
Epoch 366/366
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.3524e-06 - val_loss: 0.0044 - val_mse: 4.3683e-06
Wrote model to dpos_cnn_weights_365.hdf
Epoch 367/367
1552/1552 [==============================] - 911s - loss: 0.0053 - mse: 5.3435e-06 - val_loss: 0.0045 - val_mse: 4.5171e-06
Wrote model to dpos_cnn_weights_366.hdf
Epoch 368/368
1552/1552 [==============================] - 912s - loss: 0.0053 - mse: 5.3284e-06 - val_loss: 0.0044 - val_mse: 4.4500e-06
Wrote model to dpos_cnn_weights_367.hdf
Epoch 369/369
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.3774e-06 - val_loss: 0.0044 - val_mse: 4.4483e-06
Wrote model to dpos_cnn_weights_368.hdf
Epoch 370/370
1552/1552 [==============================] - 911s - loss: 0.0053 - mse: 5.2991e-06 - val_loss: 0.0044 - val_mse: 4.4306e-06
Wrote model to dpos_cnn_weights_369.hdf
Epoch 371/371
1552/1552 [==============================] - 912s - loss: 0.0053 - mse: 5.3162e-06 - val_loss: 0.0044 - val_mse: 4.3588e-06
Wrote model to dpos_cnn_weights_370.hdf
Epoch 372/372
1552/1552 [==============================] - 912s - loss: 0.0053 - mse: 5.3424e-06 - val_loss: 0.0045 - val_mse: 4.4943e-06
Wrote model to dpos_cnn_weights_371.hdf
Epoch 373/373
1552/1552 [==============================] - 912s - loss: 0.0053 - mse: 5.3481e-06 - val_loss: 0.0045 - val_mse: 4.4621e-06
Wrote model to dpos_cnn_weights_372.hdf
Epoch 374/374
1552/1552 [==============================] - 912s - loss: 0.0053 - mse: 5.2915e-06 - val_loss: 0.0044 - val_mse: 4.4204e-06
Wrote model to dpos_cnn_weights_373.hdf
Epoch 375/375
1552/1552 [==============================] - 912s - loss: 0.0053 - mse: 5.3370e-06 - val_loss: 0.0044 - val_mse: 4.4399e-06
Wrote model to dpos_cnn_weights_374.hdf
Epoch 376/376
1552/1552 [==============================] - 911s - loss: 0.0053 - mse: 5.2827e-06 - val_loss: 0.0045 - val_mse: 4.4960e-06
Wrote model to dpos_cnn_weights_375.hdf
Epoch 377/377
1552/1552 [==============================] - 911s - loss: 0.0053 - mse: 5.3209e-06 - val_loss: 0.0044 - val_mse: 4.4178e-06
Wrote model to dpos_cnn_weights_376.hdf
Epoch 378/378
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.3550e-06 - val_loss: 0.0043 - val_mse: 4.3388e-06
Wrote model to dpos_cnn_weights_377.hdf
Epoch 379/379
1552/1552 [==============================] - 912s - loss: 0.0053 - mse: 5.3211e-06 - val_loss: 0.0044 - val_mse: 4.4376e-06
Wrote model to dpos_cnn_weights_378.hdf
Epoch 380/380
1552/1552 [==============================] - 912s - loss: 0.0054 - mse: 5.3529e-06 - val_loss: 0.0044 - val_mse: 4.3631e-06
Wrote model to dpos_cnn_weights_379.hdf
Epoch 381/381
1552/1552 [==============================] - 912s - loss: 0.0053 - mse: 5.3302e-06 - val_loss: 0.0044 - val_mse: 4.4110e-06
Wrote model to dpos_cnn_weights_380.hdf
Epoch 382/382
1552/1552 [==============================] - 912s - loss: 0.0053 - mse: 5.3066e-06 - val_loss: 0.0045 - val_mse: 4.4595e-06
Wrote model to dpos_cnn_weights_381.hdf
Epoch 383/383
1552/1552 [==============================] - 911s - loss: 0.0053 - mse: 5.3064e-06 - val_loss: 0.0044 - val_mse: 4.4066e-06
Wrote model to dpos_cnn_weights_382.hdf
Epoch 384/384
1552/1552 [==============================] - 911s - loss: 0.0053 - mse: 5.3104e-06 - val_loss: 0.0045 - val_mse: 4.4928e-06
Wrote model to dpos_cnn_weights_383.hdf
Epoch 385/385
1552/1552 [==============================] - 912s - loss: 0.0053 - mse: 5.2606e-06 - val_loss: 0.0044 - val_mse: 4.4348e-06
Wrote model to dpos_cnn_weights_384.hdf
Epoch 386/386
1552/1552 [==============================] - 912s - loss: 0.0053 - mse: 5.2737e-06 - val_loss: 0.0044 - val_mse: 4.3902e-06
Wrote model to dpos_cnn_weights_385.hdf
Epoch 387/387
1552/1552 [==============================] - 912s - loss: 0.0053 - mse: 5.3169e-06 - val_loss: 0.0044 - val_mse: 4.3691e-06
Wrote model to dpos_cnn_weights_386.hdf
Epoch 388/388
1552/1552 [==============================] - 912s - loss: 0.0053 - mse: 5.3085e-06 - val_loss: 0.0044 - val_mse: 4.3796e-06
Wrote model to dpos_cnn_weights_387.hdf
Epoch 389/389
1552/1552 [==============================] - 912s - loss: 0.0053 - mse: 5.3193e-06 - val_loss: 0.0044 - val_mse: 4.3524e-06
Wrote model to dpos_cnn_weights_388.hdf
Epoch 390/390
1552/1552 [==============================] - 911s - loss: 0.0053 - mse: 5.3352e-06 - val_loss: 0.0045 - val_mse: 4.4529e-06
Wrote model to dpos_cnn_weights_389.hdf
Epoch 391/391
1552/1552 [==============================] - 912s - loss: 0.0053 - mse: 5.2550e-06 - val_loss: 0.0044 - val_mse: 4.3642e-06
Wrote model to dpos_cnn_weights_390.hdf
Epoch 392/392
1552/1552 [==============================] - 912s - loss: 0.0053 - mse: 5.2905e-06 - val_loss: 0.0044 - val_mse: 4.4212e-06
Wrote model to dpos_cnn_weights_391.hdf
Epoch 393/393
1552/1552 [==============================] - 912s - loss: 0.0053 - mse: 5.2728e-06 - val_loss: 0.0046 - val_mse: 4.5524e-06
Wrote model to dpos_cnn_weights_392.hdf
Epoch 394/394
1552/1552 [==============================] - 912s - loss: 0.0053 - mse: 5.3446e-06 - val_loss: 0.0044 - val_mse: 4.3680e-06
Wrote model to dpos_cnn_weights_393.hdf
Epoch 395/395
1552/1552 [==============================] - 912s - loss: 0.0053 - mse: 5.3076e-06 - val_loss: 0.0044 - val_mse: 4.3732e-06
Wrote model to dpos_cnn_weights_394.hdf
Epoch 396/396
1552/1552 [==============================] - 912s - loss: 0.0052 - mse: 5.2403e-06 - val_loss: 0.0045 - val_mse: 4.4677e-06
Wrote model to dpos_cnn_weights_395.hdf
Epoch 397/397
1552/1552 [==============================] - 911s - loss: 0.0053 - mse: 5.2888e-06 - val_loss: 0.0045 - val_mse: 4.4638e-06
Wrote model to dpos_cnn_weights_396.hdf
Epoch 398/398
1552/1552 [==============================] - 911s - loss: 0.0053 - mse: 5.2905e-06 - val_loss: 0.0044 - val_mse: 4.4291e-06
Wrote model to dpos_cnn_weights_397.hdf
Epoch 399/399
1552/1552 [==============================] - 912s - loss: 0.0053 - mse: 5.3308e-06 - val_loss: 0.0044 - val_mse: 4.4117e-06
Wrote model to dpos_cnn_weights_398.hdf
Epoch 400/400
1552/1552 [==============================] - 911s - loss: 0.0053 - mse: 5.2853e-06 - val_loss: 0.0044 - val_mse: 4.3518e-06
Wrote model to dpos_cnn_weights_399.hdf
Epoch 401/401
1552/1552 [==============================] - 912s - loss: 0.0053 - mse: 5.2811e-06 - val_loss: 0.0044 - val_mse: 4.4364e-06
Wrote model to dpos_cnn_weights_400.hdf
Epoch 402/402
1552/1552 [==============================] - 911s - loss: 0.0053 - mse: 5.2882e-06 - val_loss: 0.0045 - val_mse: 4.5077e-06
Wrote model to dpos_cnn_weights_401.hdf
Epoch 403/403
1552/1552 [==============================] - 911s - loss: 0.0053 - mse: 5.2869e-06 - val_loss: 0.0044 - val_mse: 4.4123e-06
Wrote model to dpos_cnn_weights_402.hdf
Epoch 404/404
1552/1552 [==============================] - 911s - loss: 0.0053 - mse: 5.2928e-06 - val_loss: 0.0043 - val_mse: 4.3089e-06
Wrote model to dpos_cnn_weights_403.hdf
Epoch 405/405
1552/1552 [==============================] - 911s - loss: 0.0053 - mse: 5.2662e-06 - val_loss: 0.0044 - val_mse: 4.4222e-06
Wrote model to dpos_cnn_weights_404.hdf
Epoch 406/406
1552/1552 [==============================] - 912s - loss: 0.0053 - mse: 5.2609e-06 - val_loss: 0.0044 - val_mse: 4.4431e-06
Wrote model to dpos_cnn_weights_405.hdf
Epoch 407/407
1552/1552 [==============================] - 911s - loss: 0.0053 - mse: 5.2788e-06 - val_loss: 0.0044 - val_mse: 4.4353e-06
Wrote model to dpos_cnn_weights_406.hdf
Epoch 408/408
1552/1552 [==============================] - 912s - loss: 0.0053 - mse: 5.2584e-06 - val_loss: 0.0044 - val_mse: 4.4025e-06
Wrote model to dpos_cnn_weights_407.hdf
Epoch 409/409
1552/1552 [==============================] - 912s - loss: 0.0052 - mse: 5.2470e-06 - val_loss: 0.0045 - val_mse: 4.5196e-06
Wrote model to dpos_cnn_weights_408.hdf
Epoch 410/410
1552/1552 [==============================] - 912s - loss: 0.0052 - mse: 5.2012e-06 - val_loss: 0.0045 - val_mse: 4.5104e-06
Wrote model to dpos_cnn_weights_409.hdf
Epoch 411/411
1552/1552 [==============================] - 912s - loss: 0.0053 - mse: 5.2779e-06 - val_loss: 0.0044 - val_mse: 4.3547e-06
Wrote model to dpos_cnn_weights_410.hdf
Epoch 412/412
1552/1552 [==============================] - 912s - loss: 0.0053 - mse: 5.2901e-06 - val_loss: 0.0044 - val_mse: 4.3535e-06
Wrote model to dpos_cnn_weights_411.hdf
Epoch 413/413
1552/1552 [==============================] - 912s - loss: 0.0052 - mse: 5.2210e-06 - val_loss: 0.0044 - val_mse: 4.4061e-06
Wrote model to dpos_cnn_weights_412.hdf
Epoch 414/414
1552/1552 [==============================] - 912s - loss: 0.0052 - mse: 5.2215e-06 - val_loss: 0.0045 - val_mse: 4.4514e-06
Wrote model to dpos_cnn_weights_413.hdf
Epoch 415/415
1552/1552 [==============================] - 912s - loss: 0.0053 - mse: 5.2652e-06 - val_loss: 0.0044 - val_mse: 4.4122e-06
Wrote model to dpos_cnn_weights_414.hdf
Epoch 416/416
1552/1552 [==============================] - 912s - loss: 0.0053 - mse: 5.2500e-06 - val_loss: 0.0044 - val_mse: 4.4109e-06
Wrote model to dpos_cnn_weights_415.hdf
Epoch 417/417
1552/1552 [==============================] - 912s - loss: 0.0053 - mse: 5.2890e-06 - val_loss: 0.0045 - val_mse: 4.4718e-06
Wrote model to dpos_cnn_weights_416.hdf
Epoch 418/418
1552/1552 [==============================] - 912s - loss: 0.0053 - mse: 5.2651e-06 - val_loss: 0.0044 - val_mse: 4.3656e-06
Wrote model to dpos_cnn_weights_417.hdf
Epoch 419/419
1552/1552 [==============================] - 912s - loss: 0.0052 - mse: 5.2231e-06 - val_loss: 0.0044 - val_mse: 4.3892e-06
Wrote model to dpos_cnn_weights_418.hdf
Epoch 420/420
1552/1552 [==============================] - 913s - loss: 0.0053 - mse: 5.2557e-06 - val_loss: 0.0044 - val_mse: 4.3559e-06
Wrote model to dpos_cnn_weights_419.hdf
Epoch 421/421
1552/1552 [==============================] - 914s - loss: 0.0052 - mse: 5.2200e-06 - val_loss: 0.0043 - val_mse: 4.3414e-06
Wrote model to dpos_cnn_weights_420.hdf
Epoch 422/422
1552/1552 [==============================] - 914s - loss: 0.0053 - mse: 5.2683e-06 - val_loss: 0.0043 - val_mse: 4.3424e-06
Wrote model to dpos_cnn_weights_421.hdf
Epoch 423/423
1552/1552 [==============================] - 915s - loss: 0.0052 - mse: 5.2332e-06 - val_loss: 0.0044 - val_mse: 4.4384e-06
Wrote model to dpos_cnn_weights_422.hdf
Epoch 424/424
1552/1552 [==============================] - 915s - loss: 0.0052 - mse: 5.2426e-06 - val_loss: 0.0044 - val_mse: 4.4060e-06
Wrote model to dpos_cnn_weights_423.hdf
Epoch 425/425
1552/1552 [==============================] - 916s - loss: 0.0052 - mse: 5.2169e-06 - val_loss: 0.0044 - val_mse: 4.3591e-06
Wrote model to dpos_cnn_weights_424.hdf
Epoch 426/426
1552/1552 [==============================] - 915s - loss: 0.0052 - mse: 5.1830e-06 - val_loss: 0.0044 - val_mse: 4.3752e-06
Wrote model to dpos_cnn_weights_425.hdf
Epoch 427/427
1552/1552 [==============================] - 915s - loss: 0.0052 - mse: 5.2055e-06 - val_loss: 0.0044 - val_mse: 4.3640e-06
Wrote model to dpos_cnn_weights_426.hdf
Epoch 428/428
1552/1552 [==============================] - 914s - loss: 0.0052 - mse: 5.2283e-06 - val_loss: 0.0044 - val_mse: 4.3569e-06
Wrote model to dpos_cnn_weights_427.hdf
Epoch 429/429
1552/1552 [==============================] - 915s - loss: 0.0052 - mse: 5.2077e-06 - val_loss: 0.0045 - val_mse: 4.4814e-06
Wrote model to dpos_cnn_weights_428.hdf
Epoch 430/430
1552/1552 [==============================] - 912s - loss: 0.0052 - mse: 5.2083e-06 - val_loss: 0.0045 - val_mse: 4.4809e-06
Wrote model to dpos_cnn_weights_429.hdf
Epoch 431/431
1552/1552 [==============================] - 912s - loss: 0.0052 - mse: 5.2116e-06 - val_loss: 0.0043 - val_mse: 4.3402e-06
Wrote model to dpos_cnn_weights_430.hdf
Epoch 432/432
1552/1552 [==============================] - 912s - loss: 0.0052 - mse: 5.1834e-06 - val_loss: 0.0044 - val_mse: 4.4220e-06
Wrote model to dpos_cnn_weights_431.hdf
Epoch 433/433
1552/1552 [==============================] - 912s - loss: 0.0052 - mse: 5.1852e-06 - val_loss: 0.0045 - val_mse: 4.4572e-06
Wrote model to dpos_cnn_weights_432.hdf
Epoch 434/434
1552/1552 [==============================] - 912s - loss: 0.0052 - mse: 5.2360e-06 - val_loss: 0.0045 - val_mse: 4.4587e-06
Wrote model to dpos_cnn_weights_433.hdf
Epoch 435/435
1552/1552 [==============================] - 912s - loss: 0.0052 - mse: 5.2103e-06 - val_loss: 0.0044 - val_mse: 4.4109e-06
Wrote model to dpos_cnn_weights_434.hdf
Epoch 436/436
1552/1552 [==============================] - 912s - loss: 0.0052 - mse: 5.1776e-06 - val_loss: 0.0044 - val_mse: 4.4242e-06
Wrote model to dpos_cnn_weights_435.hdf
Epoch 437/437
1552/1552 [==============================] - 912s - loss: 0.0052 - mse: 5.2111e-06 - val_loss: 0.0044 - val_mse: 4.3786e-06
Wrote model to dpos_cnn_weights_436.hdf
Epoch 438/438
1552/1552 [==============================] - 911s - loss: 0.0052 - mse: 5.2380e-06 - val_loss: 0.0045 - val_mse: 4.4794e-06
Wrote model to dpos_cnn_weights_437.hdf
Epoch 439/439
1552/1552 [==============================] - 911s - loss: 0.0052 - mse: 5.1789e-06 - val_loss: 0.0044 - val_mse: 4.4008e-06
Wrote model to dpos_cnn_weights_438.hdf
Epoch 440/440
1552/1552 [==============================] - 911s - loss: 0.0052 - mse: 5.1953e-06 - val_loss: 0.0046 - val_mse: 4.6087e-06
Wrote model to dpos_cnn_weights_439.hdf
Epoch 441/441
1552/1552 [==============================] - 912s - loss: 0.0052 - mse: 5.1770e-06 - val_loss: 0.0044 - val_mse: 4.4008e-06
Wrote model to dpos_cnn_weights_440.hdf
Epoch 442/442
1552/1552 [==============================] - 911s - loss: 0.0053 - mse: 5.2633e-06 - val_loss: 0.0044 - val_mse: 4.4148e-06
Wrote model to dpos_cnn_weights_441.hdf
Epoch 443/443
1552/1552 [==============================] - 912s - loss: 0.0052 - mse: 5.1512e-06 - val_loss: 0.0045 - val_mse: 4.5317e-06
Wrote model to dpos_cnn_weights_442.hdf
Epoch 444/444
1552/1552 [==============================] - 912s - loss: 0.0052 - mse: 5.1561e-06 - val_loss: 0.0045 - val_mse: 4.4757e-06
Wrote model to dpos_cnn_weights_443.hdf
Epoch 445/445
1552/1552 [==============================] - 912s - loss: 0.0052 - mse: 5.1610e-06 - val_loss: 0.0044 - val_mse: 4.3713e-06
Wrote model to dpos_cnn_weights_444.hdf
Epoch 446/446
1552/1552 [==============================] - 912s - loss: 0.0051 - mse: 5.1219e-06 - val_loss: 0.0044 - val_mse: 4.4388e-06
Wrote model to dpos_cnn_weights_445.hdf
Epoch 447/447
1552/1552 [==============================] - 912s - loss: 0.0052 - mse: 5.2143e-06 - val_loss: 0.0044 - val_mse: 4.4308e-06
Wrote model to dpos_cnn_weights_446.hdf
Epoch 448/448
1552/1552 [==============================] - 912s - loss: 0.0052 - mse: 5.2117e-06 - val_loss: 0.0043 - val_mse: 4.3429e-06
Wrote model to dpos_cnn_weights_447.hdf
Epoch 449/449
1552/1552 [==============================] - 912s - loss: 0.0051 - mse: 5.0724e-06 - val_loss: 0.0044 - val_mse: 4.4433e-06
Wrote model to dpos_cnn_weights_448.hdf
Epoch 450/450
1552/1552 [==============================] - 912s - loss: 0.0051 - mse: 5.1351e-06 - val_loss: 0.0044 - val_mse: 4.4382e-06
Wrote model to dpos_cnn_weights_449.hdf
Epoch 451/451
1552/1552 [==============================] - 912s - loss: 0.0052 - mse: 5.2474e-06 - val_loss: 0.0045 - val_mse: 4.4679e-06
Wrote model to dpos_cnn_weights_450.hdf
Epoch 452/452
1552/1552 [==============================] - 912s - loss: 0.0051 - mse: 5.0920e-06 - val_loss: 0.0044 - val_mse: 4.4066e-06
Wrote model to dpos_cnn_weights_451.hdf
Epoch 453/453
1552/1552 [==============================] - 911s - loss: 0.0052 - mse: 5.1503e-06 - val_loss: 0.0044 - val_mse: 4.4114e-06
Wrote model to dpos_cnn_weights_452.hdf
Epoch 454/454
1552/1552 [==============================] - 911s - loss: 0.0051 - mse: 5.1136e-06 - val_loss: 0.0045 - val_mse: 4.4685e-06
Wrote model to dpos_cnn_weights_453.hdf
Epoch 455/455
1552/1552 [==============================] - 911s - loss: 0.0052 - mse: 5.1611e-06 - val_loss: 0.0046 - val_mse: 4.5954e-06
Wrote model to dpos_cnn_weights_454.hdf
Epoch 456/456
1552/1552 [==============================] - 911s - loss: 0.0051 - mse: 5.1211e-06 - val_loss: 0.0044 - val_mse: 4.4261e-06
Wrote model to dpos_cnn_weights_455.hdf
Epoch 457/457
1552/1552 [==============================] - 912s - loss: 0.0051 - mse: 5.1140e-06 - val_loss: 0.0045 - val_mse: 4.4742e-06
Wrote model to dpos_cnn_weights_456.hdf
Epoch 458/458
1552/1552 [==============================] - 912s - loss: 0.0051 - mse: 5.1222e-06 - val_loss: 0.0047 - val_mse: 4.6590e-06
Wrote model to dpos_cnn_weights_457.hdf
Epoch 459/459
1552/1552 [==============================] - 912s - loss: 0.0051 - mse: 5.1171e-06 - val_loss: 0.0043 - val_mse: 4.3336e-06
Wrote model to dpos_cnn_weights_458.hdf
Epoch 460/460
1552/1552 [==============================] - 911s - loss: 0.0051 - mse: 5.0986e-06 - val_loss: 0.0044 - val_mse: 4.3749e-06
Wrote model to dpos_cnn_weights_459.hdf
Epoch 461/461
1552/1552 [==============================] - 912s - loss: 0.0052 - mse: 5.1602e-06 - val_loss: 0.0044 - val_mse: 4.4459e-06
Wrote model to dpos_cnn_weights_460.hdf
Epoch 462/462
1552/1552 [==============================] - 912s - loss: 0.0052 - mse: 5.1527e-06 - val_loss: 0.0045 - val_mse: 4.4623e-06
Wrote model to dpos_cnn_weights_461.hdf
Epoch 463/463
1552/1552 [==============================] - 912s - loss: 0.0051 - mse: 5.0794e-06 - val_loss: 0.0044 - val_mse: 4.4222e-06
Wrote model to dpos_cnn_weights_462.hdf
Epoch 464/464
1552/1552 [==============================] - 912s - loss: 0.0051 - mse: 5.1300e-06 - val_loss: 0.0045 - val_mse: 4.5419e-06
Wrote model to dpos_cnn_weights_463.hdf
Epoch 465/465
1552/1552 [==============================] - 912s - loss: 0.0051 - mse: 5.0996e-06 - val_loss: 0.0046 - val_mse: 4.6372e-06
Wrote model to dpos_cnn_weights_464.hdf
Epoch 466/466
1552/1552 [==============================] - 912s - loss: 0.0052 - mse: 5.1733e-06 - val_loss: 0.0044 - val_mse: 4.4422e-06
Wrote model to dpos_cnn_weights_465.hdf
Epoch 467/467
1552/1552 [==============================] - 912s - loss: 0.0051 - mse: 5.1265e-06 - val_loss: 0.0045 - val_mse: 4.4890e-06
Wrote model to dpos_cnn_weights_466.hdf
Epoch 468/468
1552/1552 [==============================] - 912s - loss: 0.0051 - mse: 5.1354e-06 - val_loss: 0.0044 - val_mse: 4.4032e-06
Wrote model to dpos_cnn_weights_467.hdf
Epoch 469/469
1552/1552 [==============================] - 911s - loss: 0.0051 - mse: 5.0790e-06 - val_loss: 0.0045 - val_mse: 4.4692e-06
Wrote model to dpos_cnn_weights_468.hdf
Epoch 470/470
1552/1552 [==============================] - 912s - loss: 0.0051 - mse: 5.0882e-06 - val_loss: 0.0048 - val_mse: 4.7966e-06
Wrote model to dpos_cnn_weights_469.hdf
Epoch 471/471
1552/1552 [==============================] - 912s - loss: 0.0051 - mse: 5.1272e-06 - val_loss: 0.0046 - val_mse: 4.6076e-06
Wrote model to dpos_cnn_weights_470.hdf
Epoch 472/472
1552/1552 [==============================] - 911s - loss: 0.0051 - mse: 5.0749e-06 - val_loss: 0.0044 - val_mse: 4.3953e-06
Wrote model to dpos_cnn_weights_471.hdf
Epoch 473/473
1552/1552 [==============================] - 911s - loss: 0.0051 - mse: 5.1291e-06 - val_loss: 0.0044 - val_mse: 4.3760e-06
Wrote model to dpos_cnn_weights_472.hdf
Epoch 474/474
1552/1552 [==============================] - 911s - loss: 0.0051 - mse: 5.0730e-06 - val_loss: 0.0044 - val_mse: 4.3823e-06
Wrote model to dpos_cnn_weights_473.hdf
Epoch 475/475
1552/1552 [==============================] - 912s - loss: 0.0051 - mse: 5.1082e-06 - val_loss: 0.0045 - val_mse: 4.5175e-06
Wrote model to dpos_cnn_weights_474.hdf
Epoch 476/476
1552/1552 [==============================] - 911s - loss: 0.0051 - mse: 5.0562e-06 - val_loss: 0.0044 - val_mse: 4.3711e-06
Wrote model to dpos_cnn_weights_475.hdf
Epoch 477/477
1552/1552 [==============================] - 912s - loss: 0.0051 - mse: 5.0820e-06 - val_loss: 0.0046 - val_mse: 4.5824e-06
Wrote model to dpos_cnn_weights_476.hdf
Epoch 478/478
1552/1552 [==============================] - 912s - loss: 0.0051 - mse: 5.0560e-06 - val_loss: 0.0045 - val_mse: 4.4895e-06
Wrote model to dpos_cnn_weights_477.hdf
Epoch 479/479
1552/1552 [==============================] - 911s - loss: 0.0050 - mse: 5.0438e-06 - val_loss: 0.0044 - val_mse: 4.4219e-06
Wrote model to dpos_cnn_weights_478.hdf
Epoch 480/480
1552/1552 [==============================] - 911s - loss: 0.0050 - mse: 5.0234e-06 - val_loss: 0.0044 - val_mse: 4.3722e-06
Wrote model to dpos_cnn_weights_479.hdf
Epoch 481/481
1552/1552 [==============================] - 912s - loss: 0.0050 - mse: 5.0319e-06 - val_loss: 0.0044 - val_mse: 4.4293e-06
Wrote model to dpos_cnn_weights_480.hdf
Epoch 482/482
1552/1552 [==============================] - 911s - loss: 0.0051 - mse: 5.0526e-06 - val_loss: 0.0045 - val_mse: 4.5293e-06
Wrote model to dpos_cnn_weights_481.hdf
Epoch 483/483
1552/1552 [==============================] - 911s - loss: 0.0051 - mse: 5.0703e-06 - val_loss: 0.0045 - val_mse: 4.5012e-06
Wrote model to dpos_cnn_weights_482.hdf
Epoch 484/484
1552/1552 [==============================] - 912s - loss: 0.0050 - mse: 5.0124e-06 - val_loss: 0.0046 - val_mse: 4.5534e-06
Wrote model to dpos_cnn_weights_483.hdf
Epoch 485/485
1552/1552 [==============================] - 911s - loss: 0.0051 - mse: 5.0757e-06 - val_loss: 0.0046 - val_mse: 4.6120e-06
Wrote model to dpos_cnn_weights_484.hdf
Epoch 486/486
1552/1552 [==============================] - 911s - loss: 0.0051 - mse: 5.0703e-06 - val_loss: 0.0044 - val_mse: 4.4429e-06
Wrote model to dpos_cnn_weights_485.hdf
Epoch 487/487
1552/1552 [==============================] - 911s - loss: 0.0051 - mse: 5.0824e-06 - val_loss: 0.0044 - val_mse: 4.3595e-06
Wrote model to dpos_cnn_weights_486.hdf
Epoch 488/488
1552/1552 [==============================] - 911s - loss: 0.0051 - mse: 5.0640e-06 - val_loss: 0.0046 - val_mse: 4.5658e-06
Wrote model to dpos_cnn_weights_487.hdf
Epoch 489/489
1552/1552 [==============================] - 911s - loss: 0.0051 - mse: 5.0794e-06 - val_loss: 0.0045 - val_mse: 4.4532e-06
Wrote model to dpos_cnn_weights_488.hdf
Epoch 490/490
1552/1552 [==============================] - 912s - loss: 0.0050 - mse: 5.0349e-06 - val_loss: 0.0044 - val_mse: 4.4177e-06
Wrote model to dpos_cnn_weights_489.hdf
Epoch 491/491
1552/1552 [==============================] - 912s - loss: 0.0050 - mse: 5.0457e-06 - val_loss: 0.0046 - val_mse: 4.5785e-06
Wrote model to dpos_cnn_weights_490.hdf
Epoch 492/492
1552/1552 [==============================] - 912s - loss: 0.0051 - mse: 5.0623e-06 - val_loss: 0.0046 - val_mse: 4.5799e-06
Wrote model to dpos_cnn_weights_491.hdf
Epoch 493/493
1552/1552 [==============================] - 912s - loss: 0.0050 - mse: 5.0268e-06 - val_loss: 0.0044 - val_mse: 4.3844e-06
Wrote model to dpos_cnn_weights_492.hdf
Epoch 494/494
1552/1552 [==============================] - 911s - loss: 0.0050 - mse: 5.0095e-06 - val_loss: 0.0045 - val_mse: 4.4519e-06
Wrote model to dpos_cnn_weights_493.hdf
Epoch 495/495
1552/1552 [==============================] - 912s - loss: 0.0051 - mse: 5.0866e-06 - val_loss: 0.0045 - val_mse: 4.5465e-06
Wrote model to dpos_cnn_weights_494.hdf
Epoch 496/496
1552/1552 [==============================] - 911s - loss: 0.0051 - mse: 5.0958e-06 - val_loss: 0.0045 - val_mse: 4.4502e-06
Wrote model to dpos_cnn_weights_495.hdf
Epoch 497/497
1552/1552 [==============================] - 911s - loss: 0.0050 - mse: 5.0353e-06 - val_loss: 0.0045 - val_mse: 4.4519e-06
Wrote model to dpos_cnn_weights_496.hdf
Epoch 498/498
1552/1552 [==============================] - 911s - loss: 0.0050 - mse: 5.0219e-06 - val_loss: 0.0044 - val_mse: 4.4017e-06
Wrote model to dpos_cnn_weights_497.hdf
Epoch 499/499
1552/1552 [==============================] - 911s - loss: 0.0050 - mse: 5.0453e-06 - val_loss: 0.0046 - val_mse: 4.5692e-06
Wrote model to dpos_cnn_weights_498.hdf
Epoch 500/500
1552/1552 [==============================] - 911s - loss: 0.0050 - mse: 4.9895e-06 - val_loss: 0.0045 - val_mse: 4.4650e-06
Wrote model to dpos_cnn_weights_499.hdf
Epoch 501/501
1552/1552 [==============================] - 911s - loss: 0.0050 - mse: 5.0442e-06 - val_loss: 0.0045 - val_mse: 4.4775e-06
Wrote model to dpos_cnn_weights_500.hdf
Epoch 502/502
1552/1552 [==============================] - 911s - loss: 0.0050 - mse: 5.0091e-06 - val_loss: 0.0045 - val_mse: 4.4937e-06
Wrote model to dpos_cnn_weights_501.hdf
Epoch 503/503
1552/1552 [==============================] - 912s - loss: 0.0050 - mse: 5.0020e-06 - val_loss: 0.0045 - val_mse: 4.5243e-06
Wrote model to dpos_cnn_weights_502.hdf
Epoch 504/504
1552/1552 [==============================] - 911s - loss: 0.0051 - mse: 5.0501e-06 - val_loss: 0.0046 - val_mse: 4.6166e-06
Wrote model to dpos_cnn_weights_503.hdf
Epoch 505/505
1552/1552 [==============================] - 911s - loss: 0.0051 - mse: 5.0733e-06 - val_loss: 0.0045 - val_mse: 4.4818e-06
Wrote model to dpos_cnn_weights_504.hdf
Epoch 506/506
1552/1552 [==============================] - 911s - loss: 0.0050 - mse: 5.0417e-06 - val_loss: 0.0044 - val_mse: 4.4414e-06
Wrote model to dpos_cnn_weights_505.hdf
Epoch 507/507
1552/1552 [==============================] - 911s - loss: 0.0050 - mse: 5.0245e-06 - val_loss: 0.0046 - val_mse: 4.5818e-06
Wrote model to dpos_cnn_weights_506.hdf
Epoch 508/508
1552/1552 [==============================] - 911s - loss: 0.0050 - mse: 4.9563e-06 - val_loss: 0.0046 - val_mse: 4.5704e-06
Wrote model to dpos_cnn_weights_507.hdf
Epoch 509/509
1552/1552 [==============================] - 911s - loss: 0.0050 - mse: 5.0475e-06 - val_loss: 0.0045 - val_mse: 4.4592e-06
Wrote model to dpos_cnn_weights_508.hdf
Epoch 510/510
1552/1552 [==============================] - 911s - loss: 0.0050 - mse: 4.9995e-06 - val_loss: 0.0051 - val_mse: 5.1238e-06
Wrote model to dpos_cnn_weights_509.hdf
Epoch 511/511
1552/1552 [==============================] - 911s - loss: 0.0050 - mse: 4.9945e-06 - val_loss: 0.0045 - val_mse: 4.4728e-06
Wrote model to dpos_cnn_weights_510.hdf
Epoch 512/512
1552/1552 [==============================] - 910s - loss: 0.0050 - mse: 5.0265e-06 - val_loss: 0.0045 - val_mse: 4.5234e-06
Wrote model to dpos_cnn_weights_511.hdf
Epoch 513/513
1552/1552 [==============================] - 911s - loss: 0.0050 - mse: 4.9671e-06 - val_loss: 0.0044 - val_mse: 4.3597e-06
Wrote model to dpos_cnn_weights_512.hdf
Epoch 514/514
1552/1552 [==============================] - 911s - loss: 0.0050 - mse: 5.0029e-06 - val_loss: 0.0044 - val_mse: 4.4276e-06
Wrote model to dpos_cnn_weights_513.hdf
Epoch 515/515
1552/1552 [==============================] - 911s - loss: 0.0050 - mse: 5.0206e-06 - val_loss: 0.0045 - val_mse: 4.5204e-06
Wrote model to dpos_cnn_weights_514.hdf
Epoch 516/516
1552/1552 [==============================] - 911s - loss: 0.0050 - mse: 5.0173e-06 - val_loss: 0.0045 - val_mse: 4.5211e-06
Wrote model to dpos_cnn_weights_515.hdf
Epoch 517/517
1552/1552 [==============================] - 911s - loss: 0.0050 - mse: 5.0222e-06 - val_loss: 0.0046 - val_mse: 4.5810e-06
Wrote model to dpos_cnn_weights_516.hdf
Epoch 518/518
1552/1552 [==============================] - 913s - loss: 0.0050 - mse: 5.0205e-06 - val_loss: 0.0045 - val_mse: 4.5051e-06
Wrote model to dpos_cnn_weights_517.hdf
Epoch 519/519
1552/1552 [==============================] - 912s - loss: 0.0050 - mse: 5.0014e-06 - val_loss: 0.0044 - val_mse: 4.4447e-06
Wrote model to dpos_cnn_weights_518.hdf
Epoch 520/520
1552/1552 [==============================] - 912s - loss: 0.0050 - mse: 4.9657e-06 - val_loss: 0.0046 - val_mse: 4.6116e-06
Wrote model to dpos_cnn_weights_519.hdf
Epoch 521/521
1552/1552 [==============================] - 911s - loss: 0.0050 - mse: 4.9639e-06 - val_loss: 0.0044 - val_mse: 4.3979e-06
Wrote model to dpos_cnn_weights_520.hdf
Epoch 522/522
1552/1552 [==============================] - 911s - loss: 0.0050 - mse: 5.0245e-06 - val_loss: 0.0044 - val_mse: 4.4082e-06
Wrote model to dpos_cnn_weights_521.hdf
Epoch 523/523
1552/1552 [==============================] - 910s - loss: 0.0050 - mse: 5.0233e-06 - val_loss: 0.0047 - val_mse: 4.6683e-06
Wrote model to dpos_cnn_weights_522.hdf
Epoch 524/524
1552/1552 [==============================] - 911s - loss: 0.0050 - mse: 4.9877e-06 - val_loss: 0.0045 - val_mse: 4.4924e-06
Wrote model to dpos_cnn_weights_523.hdf
Epoch 525/525
1552/1552 [==============================] - 911s - loss: 0.0050 - mse: 4.9801e-06 - val_loss: 0.0046 - val_mse: 4.6203e-06
Wrote model to dpos_cnn_weights_524.hdf
Epoch 526/526
1552/1552 [==============================] - 910s - loss: 0.0050 - mse: 4.9873e-06 - val_loss: 0.0045 - val_mse: 4.5120e-06
Wrote model to dpos_cnn_weights_525.hdf
Epoch 527/527
1552/1552 [==============================] - 910s - loss: 0.0050 - mse: 5.0166e-06 - val_loss: 0.0045 - val_mse: 4.4868e-06
Wrote model to dpos_cnn_weights_526.hdf
Epoch 528/528
1552/1552 [==============================] - 910s - loss: 0.0050 - mse: 4.9772e-06 - val_loss: 0.0046 - val_mse: 4.5582e-06
Wrote model to dpos_cnn_weights_527.hdf
Epoch 529/529
1552/1552 [==============================] - 910s - loss: 0.0050 - mse: 4.9841e-06 - val_loss: 0.0045 - val_mse: 4.5462e-06
Wrote model to dpos_cnn_weights_528.hdf
Epoch 530/530
1552/1552 [==============================] - 911s - loss: 0.0049 - mse: 4.9447e-06 - val_loss: 0.0046 - val_mse: 4.5850e-06
Wrote model to dpos_cnn_weights_529.hdf
Epoch 531/531
1552/1552 [==============================] - 910s - loss: 0.0050 - mse: 4.9685e-06 - val_loss: 0.0046 - val_mse: 4.5695e-06
Wrote model to dpos_cnn_weights_530.hdf
Epoch 532/532
1552/1552 [==============================] - 910s - loss: 0.0050 - mse: 4.9914e-06 - val_loss: 0.0044 - val_mse: 4.4303e-06
Wrote model to dpos_cnn_weights_531.hdf
Epoch 533/533
1552/1552 [==============================] - 910s - loss: 0.0050 - mse: 5.0073e-06 - val_loss: 0.0045 - val_mse: 4.4904e-06
Wrote model to dpos_cnn_weights_532.hdf
Epoch 534/534
1552/1552 [==============================] - 910s - loss: 0.0050 - mse: 5.0130e-06 - val_loss: 0.0044 - val_mse: 4.4333e-06
Wrote model to dpos_cnn_weights_533.hdf
Epoch 535/535
 106/1552 [=>............................] - ETA: 800s - loss: 0.0015 - mse: 1.4502e-06
User stopped the training.

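The loop above writes a new weights file after every epoch and is stopped by hand once validation loss flattens out (it hovers around 0.0044-0.0045 for the final hundred-plus epochs while training loss keeps creeping down). As a rough sketch of an alternative, Keras callbacks can keep only the best-scoring weights and halt the run automatically. The code below is illustrative only: it assumes the model and batch generators defined in earlier cells, and the training-generator name, batch size, and validation step count are guesses rather than values taken from this notebook.

# Sketch only, not part of the original run: save just the best dpos weights and stop
# automatically when val_loss plateaus, instead of writing dpos_cnn_weights_<n>.hdf
# every epoch and interrupting the kernel by hand.
from keras.callbacks import ModelCheckpoint, EarlyStopping

checkpoint = ModelCheckpoint('dpos_cnn_weights_best.hdf',
                             monitor='val_loss',
                             save_best_only=True,
                             save_weights_only=True,
                             verbose=1)
early_stop = EarlyStopping(monitor='val_loss',
                           patience=25,   # stop after ~25 epochs with no val_loss improvement
                           verbose=1)

model.fit_generator(TrainBatchGenDpos(32),       # assumed name, mirroring ValidateBatchGenDpos
                    steps_per_epoch=1552,        # matches the per-epoch step count in the log above
                    epochs=600,
                    validation_data=ValidateBatchGenDpos(32),
                    validation_steps=388,        # assumed ~20% validation split
                    callbacks=[checkpoint, early_stop])
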
In [12]:
# Load the best difference-in-position (dpos) prediction model as the starting point
epoch = 389
WEIGHTS_FNAME = 'dpos_cnn_weights_%i.hdf'
model.load_weights(WEIGHTS_FNAME % epoch)
print("Loaded model.")


# Plot the real versus predicted values for some of the validation data
(frames, outputs_true) = next(ValidateBatchGenDpos(2000))
plot_validate(model, frames, outputs_true, "Difference in position")


Loaded model.
2000/2000 [==============================] - 18s    
Predicted.
array([[ 0.04848485,  0.        ,  0.05405406],
       [ 0.02424242, -0.00409836,  0.0972973 ],
       [ 0.00606061, -0.01639344,  0.09189189],
       ..., 
       [-0.02424242, -0.00819672,  0.        ],
       [-0.04242424, -0.00819672,  0.        ],
       [-0.06666667, -0.01229508,  0.        ]], dtype=float32)
array([[-0.00020345,  0.01398446,  0.06069022],
       [-0.00074252,  0.01234826,  0.05628744],
       [ 0.00113529,  0.0202578 ,  0.0821106 ],
       ..., 
       [ 0.01052168,  0.01274287,  0.00568438],
       [ 0.00211596,  0.00136408, -0.0037094 ],
       [ 0.00187254,  0.00118708, -0.00253951]], dtype=float32)
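
The two arrays printed above are presumably the ground-truth batch outputs and the model's predictions for the 2000 validation frames; the plotting itself is handled by plot_validate, defined in an earlier cell. For readers without that helper, a minimal sketch of the same kind of comparison is given below; the function name and plotting details are placeholders, not the implementation used in this notebook.

# Hypothetical true-vs-predicted scatter for each of the three rod outputs.
# Not the plot_validate implementation used above; assumes matplotlib is available.
import numpy as np
import matplotlib.pyplot as plt

def plot_true_vs_predicted(outputs_true, outputs_pred, title):
    n_outputs = outputs_true.shape[1]
    fig, axes = plt.subplots(1, n_outputs, figsize=(4 * n_outputs, 4))
    for i in range(n_outputs):
        axes[i].scatter(outputs_true[:, i], outputs_pred[:, i], s=2, alpha=0.3)
        lim = max(np.abs(outputs_true[:, i]).max(), np.abs(outputs_pred[:, i]).max())
        axes[i].plot([-lim, lim], [-lim, lim], 'r--')   # perfect-prediction reference line
        axes[i].set_xlabel("true")
        axes[i].set_ylabel("predicted")
        axes[i].set_title("%s, output %d" % (title, i))
    plt.tight_layout()
    plt.show()

# Example usage with the batch drawn above:
# plot_true_vs_predicted(outputs_true, model.predict(frames), "Difference in position")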