In [1]:
import csv
from matplotlib import pyplot as plt
import numpy as np
import seaborn as sns
from scipy.misc import imread, imsave
import pickle
from keras.models import Sequential
from keras.layers.core import Dense, Activation, Flatten, Dropout, Lambda
from keras.layers import concatenate, Input
from keras.models import Model
from keras.layers.convolutional import Convolution2D
from keras.layers.pooling import MaxPooling2D


Using TensorFlow backend.

In [2]:
%matplotlib inline
sns.set_context("poster")

In [3]:
X_train = []   # camera images
XX_train = []  # auxiliary sensor readings (speed, ...)
y_train = []   # targets: [steering, throttle]

drives = ['driving_track_1_train_1.pkl',
         'driving_track_1_train_2.pkl',
         'driving_track_1_train_3.pkl',
         'driving_track_2_train_4.pkl',
         'driving_track_2_train_5.pkl',
         'driving_track_2_train_6.pkl']
for drive in drives:
    print('processing: ', drive)
    # NOTE(review): pickle.load can execute arbitrary code; only load
    # drive logs produced by this project.
    with open(drive, 'rb') as f:
        data = pickle.load(f)

    sensors = data['sensors'].astype(np.float64)
    targets = data['steering_throttle'].astype(np.float64)

    # Original frames.
    X_train.extend(data['images'])
    y_train.extend(targets)

    # Horizontally-flipped copies for augmentation. Sensor readings are
    # unaffected by a flip, so append them twice (once per image set).
    XX_train.extend(sensors)
    XX_train.extend(sensors)
    X_train.extend(np.fliplr(img) for img in data['images'])

    # BUG FIX: a horizontal flip inverts only the steering angle
    # (column 0); the throttle (column 1) keeps its sign. The previous
    # code negated both columns with np.negative(...).
    flipped_targets = targets.copy()
    flipped_targets[:, 0] = -flipped_targets[:, 0]
    y_train.extend(flipped_targets)

X_train = np.array(X_train)
XX_train = np.array(XX_train)[:, 0:1]  # only using the first sensor var at this point
y_train = np.array(y_train)


processing:  driving_track_1_train_1.pkl
processing:  driving_track_1_train_2.pkl
processing:  driving_track_1_train_3.pkl
processing:  driving_track_2_train_4.pkl
processing:  driving_track_2_train_5.pkl
processing:  driving_track_2_train_6.pkl

In [4]:
print(X_train.shape, XX_train.shape, y_train.shape)


(112272, 80, 320, 3) (112272, 1) (112272, 2)

In [5]:
# View a sample training frame to sanity-check that the images decoded correctly.
plt.imshow(X_train[0])


Out[5]:
<matplotlib.image.AxesImage at 0x7fa175719f60>
/root/anaconda3/lib/python3.6/site-packages/matplotlib/font_manager.py:1297: UserWarning: findfont: Font family ['sans-serif'] not found. Falling back to DejaVu Sans
  (prop.get_family(), self.defaultFamily[fontext]))

In [21]:
def create_model():
    """Build and compile the steering model.

    Architecture: four Conv/MaxPool/ReLU stages over the 80x320x3 camera
    image, flattened and concatenated with the 1-d sensor input (speed),
    followed by three dropout-regularised fully connected layers and a
    single linear steering head.

    Returns:
        A compiled Keras ``Model`` taking inputs named ``image_input`` and
        ``sensor_input`` and producing the output ``steer_output``
        (MSE loss, Adam optimizer).
    """
    image_input = Input(shape=(80, 320, 3), name='image_input', dtype='float32')
    sensor_input = Input(shape=(1,), name='sensor_input', dtype='float32')

    # Preprocess: scale pixels from [0, 255] to [-0.5, 0.5].
    # (No input_shape needed here — it is inferred from image_input.)
    X = Lambda(lambda x: x / 255.0 - 0.5)(image_input)

    # conv1 layer
    X = Convolution2D(64, (5, 5))(X)
    X = MaxPooling2D((2, 2))(X)
    X = Activation('relu')(X)

    # conv2 layer
    X = Convolution2D(64, (5, 5))(X)
    X = MaxPooling2D((2, 2))(X)
    X = Activation('relu')(X)

    # conv3 layer
    X = Convolution2D(64, (3, 3))(X)
    X = MaxPooling2D((2, 2))(X)
    X = Activation('relu')(X)

    # conv4 layer
    X = Convolution2D(64, (3, 3))(X)
    X = MaxPooling2D((2, 2))(X)
    X = Activation('relu')(X)

    X = Flatten()(X)  # flatten conv features

    # Add in the speed; later we may add other variables such as the last
    # several throttle / speed / steering angles and other sensors.
    X = concatenate([X, sensor_input])

    # Fully connected layers.

    # fc1
    X = Dense(256)(X)
    X = Dropout(0.5)(X)
    X = Activation('relu')(X)

    # fc2
    X = Dense(128)(X)
    X = Dropout(0.5)(X)
    X = Activation('relu')(X)

    # fc3
    X = Dense(64)(X)
    X = Dropout(0.5)(X)
    X = Activation('relu')(X)

    # Single steering head. To re-enable a throttle head, add
    #   throttle_output = Dense(1, name='throttle_output')(X)
    # list it in outputs, and give it its own loss/loss_weight below.
    steer_output = Dense(1, name='steer_output')(X)
    model = Model(inputs=[image_input, sensor_input], outputs=[steer_output])

    model.compile(optimizer='adam',
                  loss={'steer_output': 'mse'},
                  loss_weights={'steer_output': 1.0})

    return model

In [22]:
# (disabled) K-fold cross-validation scaffolding, kept for reference —
# requires KerasRegressor, KFold, and cross_val_score imports to run:
# estimator = KerasRegressor(build_fn=lambda: model, epochs=5, shuffle=True, batch_size=128, verbose=1)
# kfold = KFold(n_splits=3)
# results = cross_val_score(estimator, X_train, y_train, cv=kfold)

# print("Results: %.2f (%.2f) MSE" % (results.mean(), results.std()))

In [23]:
model = create_model()

In [ ]:
# Train the model, saving a checkpoint after each epoch so any epoch's
# weights can be recovered if training is interrupted.
for i in range(20):
    # The model is compiled with the single 'steer_output' head, so only
    # the steering column (0) of y_train is passed as a target. The old
    # call also passed a 'throttle_output' key that does not match any
    # compiled output of this model.
    model.fit(
        {'image_input': X_train, 'sensor_input': XX_train},
        {'steer_output': y_train[:, [0]]},
        shuffle=True, epochs=1, validation_split=.2, batch_size=64)

    model.save("data_model_with_speed{}.h5".format(i))


Train on 89817 samples, validate on 22455 samples
Epoch 1/1
89817/89817 [==============================] - 430s - loss: 0.0565 - val_loss: 0.0785
Train on 89817 samples, validate on 22455 samples
Epoch 1/1
89817/89817 [==============================] - 428s - loss: 0.0335 - val_loss: 0.0701
Train on 89817 samples, validate on 22455 samples
Epoch 1/1
89817/89817 [==============================] - 427s - loss: 0.0291 - val_loss: 0.0657
Train on 89817 samples, validate on 22455 samples
Epoch 1/1
89817/89817 [==============================] - 426s - loss: 0.0262 - val_loss: 0.0634
Train on 89817 samples, validate on 22455 samples
Epoch 1/1
89817/89817 [==============================] - 426s - loss: 0.0241 - val_loss: 0.0565
Train on 89817 samples, validate on 22455 samples
Epoch 1/1
89817/89817 [==============================] - 425s - loss: 0.0228 - val_loss: 0.0571
Train on 89817 samples, validate on 22455 samples
Epoch 1/1
89817/89817 [==============================] - 425s - loss: 0.0212 - val_loss: 0.0555
Train on 89817 samples, validate on 22455 samples
Epoch 1/1
89817/89817 [==============================] - 425s - loss: 0.0203 - val_loss: 0.0626
Train on 89817 samples, validate on 22455 samples
Epoch 1/1
89817/89817 [==============================] - 424s - loss: 0.0197 - val_loss: 0.0671
Train on 89817 samples, validate on 22455 samples
Epoch 1/1
89817/89817 [==============================] - 424s - loss: 0.0189 - val_loss: 0.0516
Train on 89817 samples, validate on 22455 samples
Epoch 1/1
89817/89817 [==============================] - 424s - loss: 0.0183 - val_loss: 0.0532
Train on 89817 samples, validate on 22455 samples
Epoch 1/1
46464/89817 [==============>...............] - ETA: 191s - loss: 0.0181
---------------------------------------------------------------------------
KeyboardInterrupt                         Traceback (most recent call last)
<ipython-input-24-ffb669ddd09e> in <module>()
      5         {'image_input': X_train, 'sensor_input': XX_train},
      6         {'steer_output': y_train[:,[0]], 'throttle_output': y_train[:,[1]]},
----> 7         shuffle=True, epochs=1, validation_split=.2, batch_size=64)
      8 
      9     model.save("data_model_with_speed{}.h5".format(i))

/root/anaconda3/lib/python3.6/site-packages/keras/engine/training.py in fit(self, x, y, batch_size, epochs, verbose, callbacks, validation_split, validation_data, shuffle, class_weight, sample_weight, initial_epoch, **kwargs)
   1428                               val_f=val_f, val_ins=val_ins, shuffle=shuffle,
   1429                               callback_metrics=callback_metrics,
-> 1430                               initial_epoch=initial_epoch)
   1431 
   1432     def evaluate(self, x, y, batch_size=32, verbose=1, sample_weight=None):

/root/anaconda3/lib/python3.6/site-packages/keras/engine/training.py in _fit_loop(self, f, ins, out_labels, batch_size, epochs, verbose, callbacks, val_f, val_ins, shuffle, callback_metrics, initial_epoch)
   1077                 batch_logs['size'] = len(batch_ids)
   1078                 callbacks.on_batch_begin(batch_index, batch_logs)
-> 1079                 outs = f(ins_batch)
   1080                 if not isinstance(outs, list):
   1081                     outs = [outs]

/root/anaconda3/lib/python3.6/site-packages/keras/backend/tensorflow_backend.py in __call__(self, inputs)
   2266         updated = session.run(self.outputs + [self.updates_op],
   2267                               feed_dict=feed_dict,
-> 2268                               **self.session_kwargs)
   2269         return updated[:len(self.outputs)]
   2270 

/root/anaconda3/lib/python3.6/site-packages/tensorflow/python/client/session.py in run(self, fetches, feed_dict, options, run_metadata)
    787     try:
    788       result = self._run(None, fetches, feed_dict, options_ptr,
--> 789                          run_metadata_ptr)
    790       if run_metadata:
    791         proto_data = tf_session.TF_GetBuffer(run_metadata_ptr)

/root/anaconda3/lib/python3.6/site-packages/tensorflow/python/client/session.py in _run(self, handle, fetches, feed_dict, options, run_metadata)
    995     if final_fetches or final_targets:
    996       results = self._do_run(handle, final_targets, final_fetches,
--> 997                              feed_dict_string, options, run_metadata)
    998     else:
    999       results = []

/root/anaconda3/lib/python3.6/site-packages/tensorflow/python/client/session.py in _do_run(self, handle, target_list, fetch_list, feed_dict, options, run_metadata)
   1130     if handle is None:
   1131       return self._do_call(_run_fn, self._session, feed_dict, fetch_list,
-> 1132                            target_list, options, run_metadata)
   1133     else:
   1134       return self._do_call(_prun_fn, self._session, handle, feed_dict,

/root/anaconda3/lib/python3.6/site-packages/tensorflow/python/client/session.py in _do_call(self, fn, *args)
   1137   def _do_call(self, fn, *args):
   1138     try:
-> 1139       return fn(*args)
   1140     except errors.OpError as e:
   1141       message = compat.as_text(e.message)

/root/anaconda3/lib/python3.6/site-packages/tensorflow/python/client/session.py in _run_fn(session, feed_dict, fetch_list, target_list, options, run_metadata)
   1119         return tf_session.TF_Run(session, options,
   1120                                  feed_dict, fetch_list, target_list,
-> 1121                                  status, run_metadata)
   1122 
   1123     def _prun_fn(session, handle, feed_dict, fetch_list):

KeyboardInterrupt: 

In [30]:
# Recorded result from a previous run, for reference: loss: 0.0199 - val_loss: 0.0819

In [ ]:


In [ ]:


In [ ]: