Map collection

A simple notebook exploring how we can combine camera images with position and orientation to capture a map image.


In [346]:
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import skvideo.io
import cv2
import scipy.ndimage
from matplotlib import animation, rc
import fisheye
import encoder_math
from collections import namedtuple
%matplotlib inline

Data

Load the data and derive the key fields. The main inputs needed here are the orientation (heading is orientation_x) and the encoder counts expressed as per-row differences.


In [29]:
def get_frame(timestamp, fps=30):
    # Map a timestamp in seconds to the nearest video frame index at the given fps.
    return np.round(timestamp / (1.0 / fps)).astype(int)

In [137]:
df = pd.read_csv('C:/Users/corey/Desktop/output.csv')
df['timestamp'] = df['timestamp'] - df['timestamp'].min()
df['encoder0_count'] = df['encoder0_count'] - df['encoder0_count'].min()
df['encoder1_count'] = df['encoder1_count'] - df['encoder1_count'].min()
df['encoder0_diff'] = df['encoder0_count'].diff()
df['encoder1_diff'] = df['encoder1_count'].diff()
df['frame'] = get_frame(df['timestamp'])
df


Out[137]:
timestamp orientation_x orientation_y orientation_z acceleration_x acceleration_y acceleration_z radio_steering_pwm radio_throttle_pwm encoder0_count encoder1_count steering_servo_voltage encoder0_diff encoder1_diff frame
0 0.0000 357.4375 -4.5000 -1.8750 -0.02 -0.10 0.27 368 202 0 0 490 NaN NaN 0
1 0.0270 357.5000 -4.5000 -1.8750 -0.02 -0.11 0.26 368 202 0 0 490 0.0 0.0 1
2 0.0501 357.5625 -4.4375 -1.9375 -0.04 -0.12 0.28 368 202 0 0 491 0.0 0.0 2
3 0.0841 357.6250 -4.4375 -2.1250 -0.02 -0.13 0.27 368 202 0 0 484 0.0 0.0 3
4 0.1192 357.7500 -4.3750 -2.0000 -0.07 -0.18 0.26 368 202 0 0 483 0.0 0.0 4
5 0.1510 357.8125 -4.3125 -2.1250 -0.01 -0.09 0.28 368 202 0 0 471 0.0 0.0 5
6 0.1830 357.8750 -4.4375 -2.1250 -0.06 -0.22 0.27 368 202 0 0 475 0.0 0.0 5
7 0.2171 357.8125 -4.5000 -2.1250 -0.03 -0.09 0.21 368 202 0 0 470 0.0 0.0 7
8 0.2488 357.6250 -4.4375 -2.1250 -0.02 -0.16 0.26 368 202 0 0 470 0.0 0.0 7
9 0.2849 357.5000 -4.3750 -2.1250 -0.27 -0.05 0.36 368 202 0 0 477 0.0 0.0 9
10 0.3170 357.6250 -4.3125 -2.0625 0.73 -0.14 0.82 368 202 0 0 483 0.0 0.0 10
11 0.3498 357.6250 -4.3125 -2.1875 -0.90 0.41 0.34 368 202 0 0 472 0.0 0.0 10
12 0.3821 357.7500 -4.1875 -2.1250 -0.26 -0.64 0.36 368 202 0 0 475 0.0 0.0 11
13 0.4144 357.8125 -4.1250 -2.0625 0.49 0.58 0.07 368 202 0 0 471 0.0 0.0 12
14 0.4524 357.8750 -4.0000 -2.1250 0.64 0.13 0.45 368 202 0 0 473 0.0 0.0 14
15 0.5680 358.0625 -3.8125 -2.1250 5.07 -0.19 -0.12 368 202 1 1 473 1.0 1.0 17
16 0.7543 358.0625 -3.3750 -2.0625 3.12 0.13 0.11 368 202 2 2 471 1.0 1.0 23
17 0.7919 358.0625 -3.4375 -2.0625 -0.58 0.01 0.76 368 202 2 3 469 0.0 1.0 24
18 0.9386 358.0625 -3.1250 -1.6875 -2.06 -0.25 0.84 368 202 3 4 474 1.0 1.0 28
19 0.9877 358.0625 -2.7500 -1.6250 2.46 -0.24 -0.29 368 202 4 5 474 1.0 1.0 30
20 1.1234 358.3125 -2.7500 -1.4375 1.71 0.24 0.26 368 202 8 8 471 4.0 3.0 34
21 1.3037 358.8125 -2.6875 -1.2500 0.48 -0.21 0.71 368 202 15 15 470 7.0 7.0 39
22 1.3549 358.8750 -2.7500 -1.2500 0.67 -0.33 0.48 368 202 17 17 471 2.0 2.0 41
23 1.4883 359.3750 -2.5000 -1.1875 0.49 0.01 -0.45 368 202 24 24 474 7.0 7.0 45
24 1.5179 359.4375 -2.4375 -1.1875 -0.01 -0.03 -0.38 368 202 25 26 468 1.0 2.0 46
25 1.6708 0.2500 -2.3125 -1.2500 0.67 -0.06 0.23 368 202 33 34 468 8.0 8.0 50
26 1.8547 1.0000 -2.3750 -1.3125 0.21 -0.72 -0.07 368 202 43 44 471 10.0 10.0 56
27 1.8869 1.1875 -2.4375 -1.1875 0.02 -0.42 -0.66 368 202 44 46 474 1.0 2.0 57
28 2.0404 1.9375 -2.6250 -1.3125 0.04 -0.82 -0.01 368 202 54 55 489 10.0 9.0 61
29 2.0836 2.1250 -2.5625 -1.3125 0.29 -0.03 -0.13 368 202 56 57 483 2.0 2.0 63
... ... ... ... ... ... ... ... ... ... ... ... ... ... ... ...
689 28.9470 139.6875 1.4375 1.4375 -0.46 0.57 0.17 368 202 1299 1136 534 2.0 2.0 868
690 28.9725 139.6250 1.3750 1.6250 -0.50 1.00 -0.12 368 202 1300 1136 537 1.0 0.0 869
691 29.0048 139.6875 1.1250 1.7500 -0.23 0.44 -0.26 368 202 1301 1138 531 1.0 2.0 870
692 29.0376 139.5625 1.0625 1.9375 -0.67 -0.23 0.57 368 202 1302 1139 579 1.0 1.0 871
693 29.0722 139.5625 0.5625 2.1250 0.09 0.93 -0.19 368 202 1304 1140 478 2.0 1.0 872
694 29.1084 139.6250 0.0000 2.2500 -2.64 -2.65 2.61 368 202 1305 1142 480 1.0 2.0 873
695 29.1378 140.0000 0.0625 2.0625 -1.06 -0.20 -0.26 368 202 1306 1143 473 1.0 1.0 874
696 29.1723 140.1875 0.0000 1.8750 -0.62 0.44 0.07 368 202 1308 1144 471 2.0 1.0 875
697 29.2054 140.3125 0.0000 1.8125 -0.18 0.98 -0.40 368 202 1309 1145 462 1.0 1.0 876
698 29.2385 140.3125 0.0000 1.8750 -0.64 0.30 0.60 368 202 1310 1147 463 1.0 2.0 877
699 29.2772 140.6250 0.0000 1.6875 -0.65 0.13 0.27 368 202 1312 1148 492 2.0 1.0 878
700 29.3031 140.6875 -0.0625 1.4375 -0.77 0.21 0.56 368 202 1313 1149 461 1.0 1.0 879
701 29.3390 140.8125 -0.1875 1.3750 -0.63 0.52 0.00 368 202 1314 1151 508 1.0 2.0 880
702 29.3711 140.9375 -0.1875 1.3750 -0.30 0.41 -0.35 368 202 1316 1152 454 2.0 1.0 881
703 29.4056 141.1875 -0.1875 1.6250 -0.99 0.88 0.44 368 202 1317 1153 553 1.0 1.0 882
704 29.4426 141.6875 -0.1250 1.8125 -0.19 0.34 0.01 368 202 1319 1155 449 2.0 2.0 883
705 29.4712 141.9375 -0.1250 2.0000 -0.07 0.31 -0.43 368 202 1320 1156 447 1.0 1.0 884
706 29.5049 142.1875 0.0000 2.5000 -0.42 0.71 0.60 368 202 1321 1157 447 1.0 1.0 885
707 29.5385 142.4375 0.5000 3.3125 -0.08 0.38 -1.55 368 202 1322 1159 450 1.0 2.0 886
708 29.5712 142.5625 0.8750 3.3125 0.84 -0.11 2.84 368 202 1324 1160 448 2.0 1.0 887
709 29.6113 142.6250 0.6250 3.0625 -0.40 1.06 -0.43 368 202 1326 1162 447 2.0 2.0 888
710 29.6388 142.8125 0.4375 2.8750 -0.50 0.90 -0.04 368 202 1327 1163 451 1.0 1.0 889
711 29.6715 143.1250 0.3750 2.8125 0.05 0.02 -0.28 368 202 1328 1164 447 1.0 1.0 890
712 29.7054 143.3125 0.0000 2.6875 -0.33 0.69 1.87 368 202 1330 1166 442 2.0 2.0 891
713 29.7372 143.5000 0.0000 2.6250 -0.48 -0.14 0.15 368 202 1331 1167 441 1.0 1.0 892
714 29.7737 143.6875 0.0000 2.7500 -0.52 0.31 0.69 368 202 1332 1169 426 1.0 2.0 893
715 29.8055 144.0000 0.0625 2.8125 -0.72 0.22 0.01 368 202 1334 1171 398 2.0 2.0 894
716 29.8371 144.4375 0.1875 2.8750 -0.75 -0.17 -0.25 368 202 1336 1172 498 2.0 1.0 895
717 29.8707 145.0000 0.3125 3.1875 -0.87 0.48 1.48 368 202 1337 1174 343 1.0 2.0 896
718 29.9050 146.3750 0.1875 3.3750 -0.77 -0.51 -0.12 368 202 1338 1175 416 1.0 1.0 897

719 rows × 15 columns

Load video

Load the video and explore a sample image. Make sure the timestamps line up with the video frames.


In [6]:
filename = 'C:/Users/corey/Desktop/output.mp4'
videodata = skvideo.io.vread(filename)
print(videodata.shape)


(900, 480, 640, 3)
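
The frame index computed from the last timestamp should land just inside the frame count printed above. A minimal sanity check, assuming the log and the video start at the same moment:


In [ ]:
# Alignment check (sketch): the last logged frame index should not exceed
# the number of frames reported by skvideo above.
last_frame = get_frame(df['timestamp'].max())
print(last_frame, videodata.shape[0])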

In [15]:
df.iloc[10,:].timestamp


Out[15]:
0.31700000000000728

In [52]:
i = 0 # change this to get a different data row and corresponding frame
r = df.iloc[i,:]
print(r)
img = videodata[int(r.frame),:,:,:]
plt.imshow(img)


timestamp                   0.0000
orientation_x             357.4375
orientation_y              -4.5000
orientation_z              -1.8750
acceleration_x             -0.0200
acceleration_y             -0.1000
acceleration_z              0.2700
radio_steering_pwm        368.0000
radio_throttle_pwm        202.0000
encoder0_count              0.0000
encoder1_count              0.0000
steering_servo_voltage    490.0000
frame                       0.0000
Name: 0, dtype: float64
Out[52]:
<matplotlib.image.AxesImage at 0x11101991470>

Correct for camera distortion

Use a separately fitted calibration model to remove the fisheye lens distortion.
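
The fisheye module wraps a calibration fitted elsewhere (./calib.dat). For reference, a rough sketch of the equivalent operation using OpenCV's cv2.fisheye API; K and D below are placeholder values, not the calibration actually used in this notebook:


In [ ]:
# Sketch only: fisheye undistortion via OpenCV, with placeholder intrinsics K
# and distortion coefficients D standing in for a real calibration.
K = np.array([[300.0, 0.0, 320.0],
              [0.0, 300.0, 240.0],
              [0.0, 0.0, 1.0]])
D = np.zeros((4, 1))
map1, map2 = cv2.fisheye.initUndistortRectifyMap(
    K, D, np.eye(3), K, (640, 480), cv2.CV_16SC2)
undist_sketch = cv2.remap(img, map1, map2, interpolation=cv2.INTER_LINEAR)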


In [60]:
NX, NY = 9, 6
fe = fisheye.load_model('./calib.dat')

In [70]:
undist_img = fe.undistort(img, undistorted_size=(640, 480))
plt.imsave('C:/Users/corey/Desktop/undist_img.jpg', undist_img)

plt.figure(figsize=(20,20))
plt.subplot(1,2,1)
plt.imshow(img)
plt.subplot(1,2,2)
plt.imshow(undist_img)


Out[70]:
<matplotlib.image.AxesImage at 0x11104050dd8>

Find perspective rectification matrix

Use a homography fitted from manually identified points to transform from the camera view to a bird's-eye view. I used pylons and a measuring tape to help identify the points. This step needs some improvement to make it easier.


In [86]:
src_pts = np.array([[719,836],
                    [904,471],
                    [1000,471],
                    [1254,836]]) / 3.0
dst_pts = np.array([[850,1900],
                    [850,100],
                    [1110,100],
                    [1110,1900]]) / 3.0
print(src_pts)
print(dst_pts)
M2, mask = cv2.findHomography(src_pts, dst_pts, cv2.RANSAC,5.0)
M2


[[ 239.66666667  278.66666667]
 [ 301.33333333  157.        ]
 [ 333.33333333  157.        ]
 [ 418.          278.66666667]]
[[ 283.33333333  633.33333333]
 [ 283.33333333   33.33333333]
 [ 370.           33.33333333]
 [ 370.          633.33333333]]
Out[86]:
array([[ -5.52614778e-01,  -2.45299285e+00,   4.93829120e+02],
       [  5.31190955e-16,  -5.86330767e+00,   9.13737892e+02],
       [  9.58356375e-19,  -7.66905990e-03,   1.00000000e+00]])
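
A quick sanity check on the fitted homography is to push the source points through M2 with cv2.perspectiveTransform and confirm they land on the destination points (a sketch; with only four correspondences the reprojection should be essentially exact):


In [ ]:
# Sketch: reproject the source points through M2; they should match dst_pts.
reproj = cv2.perspectiveTransform(
    src_pts.reshape(-1, 1, 2).astype(np.float32), M2)
print(reproj.reshape(-1, 2))
print(dst_pts)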

Sample warped image. It turns out to be a pretty good representation of the ground plane; I checked it with a measuring tape and various manually identified points.


In [270]:
warped = cv2.warpPerspective(undist_img, M2, (700,700))
plt.figure(figsize=(12,12))
plt.subplot(1,2,1)
plt.imshow(img)
plt.subplot(1,2,2)
plt.imshow(warped)


Out[270]:
<matplotlib.image.AxesImage at 0x1114c940d68>

Sample showing the original, undistorted, and warped images side by side.


In [334]:
i = 0
img = videodata[int(df.iloc[i,:].frame),:,:,:]
undist_img = fe.undistort(img, undistorted_size=(640, 480))
warped = cv2.warpPerspective(undist_img, M2, (700,700))
plt.figure(figsize=(12,12))
plt.subplot(1,3,1)
plt.imshow(img)
plt.subplot(1,3,2)
plt.imshow(undist_img)
plt.subplot(1,3,3)
plt.imshow(warped)
plt.imsave('C:/Users/corey/Desktop/warped.png', warped)


Affine transformation to prepare for map

Experiment using the heading to rotate the image so it can be pasted onto a map. The negative sign on the heading is probably needed because the IMU is mounted backwards; in any case, the result matches up.
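
For context, cv2.getRotationMatrix2D(center, angle, scale) returns a 2x3 affine matrix of the form [[cos, sin, tx], [-sin, cos, ty]], with the translation chosen so that the rotation center maps onto itself (which is what the np.dot check a couple of cells below confirms). A small sketch with an arbitrary 30 degree angle:


In [ ]:
# Sketch: structure of the rotation matrix returned by getRotationMatrix2D.
theta = np.deg2rad(30.0)
M_demo = cv2.getRotationMatrix2D((350, 700), 30.0, 1)
print(M_demo)
print(np.allclose(M_demo[:, :2], [[np.cos(theta), np.sin(theta)],
                                  [-np.sin(theta), np.cos(theta)]]))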


In [272]:
rows,cols,_ = warped.shape
print(df.iloc[i,:])
M = cv2.getRotationMatrix2D((350,700),-(df.iloc[i,:].orientation_x),1)
dst = cv2.warpAffine(warped,M,(700,1400))
plt.figure(figsize=(10,10))
plt.imshow(dst[:,:,:])


timestamp                   29.905000
orientation_x              146.375000
orientation_y                0.187500
orientation_z                3.375000
acceleration_x              -0.770000
acceleration_y              -0.510000
acceleration_z              -0.120000
radio_steering_pwm         368.000000
radio_throttle_pwm         202.000000
encoder0_count            1338.000000
encoder1_count            1175.000000
steering_servo_voltage     416.000000
encoder0_diff                1.000000
encoder1_diff                1.000000
frame                      897.000000
x                          371.822739
y                          910.809259
Name: 718, dtype: float64
Out[272]:
<matplotlib.image.AxesImage at 0x1114877ec50>

In [206]:
np.dot(M, [350.0,700.0,1.0])


Out[206]:
array([ 350.,  700.])

In [119]:
df.orientation_x.plot()


Out[119]:
<matplotlib.axes._subplots.AxesSubplot at 0x11114e85978>

In [126]:
plt.plot(df.timestamp, df.encoder1_count.diff()/df.timestamp.diff())


Out[126]:
[<matplotlib.lines.Line2D at 0x111149664e0>]

Timestamp spikes are likely due to write hangs on the SD card. They probably cause significant drift and need to be fixed.


In [125]:
plt.plot(df.timestamp.diff())


Out[125]:
[<matplotlib.lines.Line2D at 0x111148f5358>]
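
A quick way to surface the affected rows (a sketch; the 0.1 s threshold is arbitrary):


In [ ]:
# Sketch: flag rows where the gap to the previous log entry is unusually large.
gaps = df.timestamp.diff()
print(df[gaps > 0.1][['timestamp', 'frame']])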

Plot path from encoders

The encoders give distance traveled per tick. We can treat that distance as a hypotenuse and use sin and cos of the IMU heading to get the x and y distance per tick. Below is an example from driving around part of my home track.
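
In other words, each row contributes dx = d * sin(heading) and dy = d * cos(heading), where d is the averaged encoder distance in meters. A compact sketch of that per-row update, using the same constants as the loop below (note the loop applies the previous row's heading):


In [ ]:
# Sketch of the dead-reckoning step performed inside the loop below.
def dead_reckon_step(x, y, heading_deg, enc0_diff, enc1_diff,
                     meters_per_tick=0.315 / 20):
    ticks = (enc0_diff + enc1_diff) / 2.0   # average the two wheel encoders
    d = ticks * meters_per_tick             # distance traveled this row
    h = np.deg2rad(heading_deg)             # IMU heading, degrees -> radians
    return x + np.sin(h) * d, y + np.cos(h) * d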


In [ ]:
src_pts = np.array([[719,836],
                    [904,471],
                    [1000,471],
                    [1254,836]])
dst_pts = np.array([[850,1900],
                    [850,100],
                    [1110,100],
                    [1110,1900]])
print(src_pts)
print(dst_pts)
M2, mask = cv2.findHomography(src_pts, dst_pts, cv2.RANSAC,5.0)
M2

In [362]:
points = []
curr_heading = 0
x, y = 0.0, 0.0
pix_per_tick = 4.0
meters_per_revolution = 0.315
ticks_per_revolution = 20
meters_per_tick = meters_per_revolution / ticks_per_revolution
pixels_per_meter = 104.0 / 0.370
for i,r in df.iterrows():
    if i == 0:
        points.append([x,y])
    else:
        approximate_ticks = (r.encoder0_diff + r.encoder1_diff) / 2
        dist_meters = approximate_ticks * meters_per_tick
        x += np.sin(curr_heading) * dist_meters
        y += np.cos(curr_heading) * dist_meters
        points.append([x,y])
    curr_heading = np.deg2rad(r.orientation_x)
points = np.array(points)    
df['x'] = points[:,0]
df['y'] = points[:,1]
df['px'] = df['x'] * pixels_per_meter
df['py'] = df['y'] * pixels_per_meter
df.px = df.px - df.px.min() + 200
df.py = df.py - df.py.min() + 200
df.py = df.py.max() - df.py + 200
map_width = df.px.max()
map_height = df.py.max()
print(map_width, map_height)
map_img = np.zeros((1500, 1200, 3))
plt.figure(figsize=(12,12))
plt.axis('equal')
plt.imshow(map_img)
plt.plot(df.px, df.py)


1127.51512917 1475.46392084
Out[362]:
[<matplotlib.lines.Line2D at 0x111b46f2fd0>]

Attempt with wheel encoder distances only (no IMU)
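
encoder_math.apply_distance isn't shown in this notebook; presumably it performs a differential-drive update, where the heading change comes from the difference between the left and right wheel distances divided by the track width. A rough sketch of that idea under that assumption (sign conventions may differ from the real encoder_math):


In [ ]:
# Sketch of a differential-drive update (an assumption about what
# encoder_math.apply_distance does, not its actual implementation).
def diff_drive_step(x, y, heading, left_dist, right_dist, track_width=0.265):
    d = (left_dist + right_dist) / 2.0                # center-point distance
    dtheta = (right_dist - left_dist) / track_width   # heading change (rad)
    x += np.sin(heading + dtheta / 2.0) * d           # advance along mid-heading
    y += np.cos(heading + dtheta / 2.0) * d
    return x, y, heading + dtheta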


In [453]:
import importlib
importlib.reload(encoder_math)
points = []
curr_heading = 0
x, y = 0.0, 0.0
pix_per_tick = 4.0
meters_per_revolution = 0.315
ticks_per_revolution = 20
meters_per_tick = meters_per_revolution / ticks_per_revolution
pixels_per_meter = 104.0 / 0.370
vehicle_width = 0.265
half_vehicle_width = vehicle_width / 2.0
center_position = np.array([x, y])
half_width_offset_vec = np.array([vehicle_width / 2.0, 0.0])
left_position = center_position - half_width_offset_vec
right_position = center_position + half_width_offset_vec
left_points = []
right_points = []
for i,r in df.iterrows():
    if i == 0:
        points.append(center_position)
        left_points.append(left_position)
        right_points.append(right_position)
    else:
        right_distance = r.encoder0_diff * meters_per_tick
        left_distance = r.encoder1_diff * meters_per_tick
        left_position, center_position, right_position = [np.array(x) for x in encoder_math.apply_distance(half_vehicle_width, left_position, center_position, right_position, left_distance, right_distance)]
        points.append(np.array(center_position))
        left_points.append(np.array(left_position))
        right_points.append(np.array(right_position))
    curr_heading = np.deg2rad(r.orientation_x)
points = np.array(points)    
left_points = np.array(left_points)  
right_points = np.array(right_points)  
df['wx'] = points[:,0]
df['wy'] = points[:,1]
df['lx'] = left_points[:,0]
df['ly'] = left_points[:,1]
df['rx'] = right_points[:,0]
df['ry'] = right_points[:,1]
df['pwx'] = df['wx'] * pixels_per_meter
df['pwy'] = df['wy'] * pixels_per_meter
df.pwx = df.pwx - df.pwx.min() + 200
df.pwy = df.pwy - df.pwy.min() + 200
df.pwy = df.pwy.max() - df.pwy + 200
map_width = df.pwx.max()
map_height = df.pwy.max()
print(map_width, map_height)
map_img = np.zeros((1500, 1200, 3))
plt.figure(figsize=(12,12))
plt.axis('equal')
plt.plot(df.wx, df.wy, df.lx, df.ly, df.rx, df.ry)


1141.8770928 1530.39162269
Out[453]:
[<matplotlib.lines.Line2D at 0x112052a53c8>,
 <matplotlib.lines.Line2D at 0x112052a5588>,
 <matplotlib.lines.Line2D at 0x112052a5e10>]

In [428]:
points


Out[428]:
array([[  0.        ,   0.        ],
       [  0.        ,   0.        ],
       [  0.        ,   0.        ],
       ..., 
       [-12.40001635,  59.81808608],
       [-11.63541203,  60.03452835],
       [-11.63067375,  60.01950799]])

In [434]:
df[['orientation_x','encoder0_diff','encoder1_diff','x','y','lx','ly','wx','wy','rx','ry']]


Out[434]:
orientation_x encoder0_diff encoder1_diff x y lx ly wx wy rx ry
0 357.4375 NaN NaN 0.000000 0.000000 -0.132500 0.000000 0.000000 0.000000 0.132500 0.000000
1 357.5000 0.0 0.0 0.000000 0.000000 -0.132500 0.000000 0.000000 0.000000 0.132500 0.000000
2 357.5625 0.0 0.0 0.000000 0.000000 -0.132500 0.000000 0.000000 0.000000 0.132500 0.000000
3 357.6250 0.0 0.0 0.000000 0.000000 -0.132500 0.000000 0.000000 0.000000 0.132500 0.000000
4 357.7500 0.0 0.0 0.000000 0.000000 -0.132500 0.000000 0.000000 0.000000 0.132500 0.000000
5 357.8125 0.0 0.0 0.000000 0.000000 -0.132500 0.000000 0.000000 0.000000 0.132500 0.000000
6 357.8750 0.0 0.0 0.000000 0.000000 -0.132500 0.000000 0.000000 0.000000 0.132500 0.000000
7 357.8125 0.0 0.0 0.000000 0.000000 -0.132500 0.000000 0.000000 0.000000 0.132500 0.000000
8 357.6250 0.0 0.0 0.000000 0.000000 -0.132500 0.000000 0.000000 0.000000 0.132500 0.000000
9 357.5000 0.0 0.0 0.000000 0.000000 -0.132500 0.000000 0.000000 0.000000 0.132500 0.000000
10 357.6250 0.0 0.0 0.000000 0.000000 -0.132500 0.000000 0.000000 0.000000 0.132500 0.000000
11 357.6250 0.0 0.0 0.000000 0.000000 -0.132500 0.000000 0.000000 0.000000 0.132500 0.000000
12 357.7500 0.0 0.0 0.000000 0.000000 -0.132500 0.000000 0.000000 0.000000 0.132500 0.000000
13 357.8125 0.0 0.0 0.000000 0.000000 -0.132500 0.000000 0.000000 0.000000 0.132500 0.000000
14 357.8750 0.0 0.0 0.000000 0.000000 -0.132500 0.000000 0.000000 0.000000 0.132500 0.000000
15 358.0625 1.0 1.0 -0.000584 0.015739 -0.132500 0.015750 0.000000 0.015750 0.132500 0.015750
16 358.0625 1.0 1.0 -0.001117 0.031480 -0.132500 0.031500 0.000000 0.031500 0.132500 0.031500
17 358.0625 0.0 1.0 -0.001383 0.039351 -0.132032 0.047241 0.000234 0.039370 0.132500 0.031500
18 358.0625 1.0 1.0 -0.001915 0.055092 -0.131097 0.062963 0.001169 0.055093 0.133436 0.047222
19 358.0625 1.0 1.0 -0.002448 0.070833 -0.130161 0.078685 0.002105 0.070815 0.134371 0.062944
20 358.3125 4.0 3.0 -0.004311 0.125926 -0.128757 0.125907 0.003743 0.125907 0.136243 0.125907
21 358.8125 7.0 7.0 -0.007558 0.236128 -0.128757 0.236157 0.003743 0.236157 0.136243 0.236157
22 358.8750 2.0 2.0 -0.008211 0.267622 -0.128757 0.267657 0.003743 0.267657 0.136243 0.267657
23 359.3750 7.0 7.0 -0.010376 0.377850 -0.128757 0.377907 0.003743 0.377907 0.136243 0.377907
24 359.4375 1.0 2.0 -0.010633 0.401474 -0.127822 0.409389 0.004445 0.401518 0.136711 0.393648
25 0.2500 8.0 8.0 -0.011870 0.527468 -0.120337 0.535166 0.011929 0.527296 0.144195 0.519426
26 1.0000 10.0 10.0 -0.011183 0.684966 -0.110982 0.692388 0.021284 0.684518 0.153550 0.676647
27 1.1875 1.0 2.0 -0.010771 0.708588 -0.108178 0.723758 0.023387 0.708046 0.154952 0.692333
28 1.9375 10.0 9.0 -0.007670 0.858181 -0.095559 0.864925 0.036707 0.857054 0.168973 0.849184
29 2.1250 2.0 2.0 -0.006605 0.889663 -0.093688 0.896369 0.038578 0.888499 0.170844 0.880628
... ... ... ... ... ... ... ... ... ... ... ...
689 139.6875 2.0 2.0 -2.915093 0.603424 -2.723356 1.616457 -2.853122 1.589676 -2.982887 1.562895
690 139.6250 1.0 0.0 -2.909998 0.597419 -2.723356 1.616457 -2.851302 1.582015 -2.979247 1.547573
691 139.6875 1.0 2.0 -2.894694 0.579421 -2.716077 1.585814 -2.845842 1.559033 -2.975607 1.532252
692 139.5625 1.0 1.0 -2.884505 0.567411 -2.712893 1.570390 -2.842659 1.543608 -2.972424 1.516827
693 139.5625 2.0 1.0 -2.869181 0.549430 -2.709254 1.555068 -2.837199 1.520627 -2.965144 1.486185
694 139.6250 1.0 2.0 -2.853858 0.531448 -2.701974 1.524426 -2.831739 1.497645 -2.961504 1.470863
695 140.0000 1.0 1.0 -2.843655 0.519450 -2.698791 1.509001 -2.828556 1.482220 -2.958321 1.455439
696 140.1875 2.0 1.0 -2.828469 0.501352 -2.695151 1.493680 -2.823096 1.459238 -2.951041 1.424796
697 140.3125 1.0 1.0 -2.818385 0.489253 -2.691057 1.478471 -2.819002 1.444029 -2.946947 1.409587
698 140.3125 1.0 2.0 -2.803298 0.471073 -2.683777 1.447828 -2.813542 1.421047 -2.943308 1.394266
699 140.6250 2.0 1.0 -2.788211 0.452893 -2.680137 1.432507 -2.808083 1.398065 -2.936028 1.363624
700 140.6875 1.0 1.0 -2.778219 0.440718 -2.676043 1.417299 -2.803989 1.382857 -2.931934 1.348415
701 140.8125 1.0 2.0 -2.763252 0.422439 -2.668764 1.386656 -2.798529 1.359875 -2.928294 1.333094
702 140.9375 2.0 1.0 -2.748324 0.404128 -2.665124 1.371335 -2.793069 1.336893 -2.921015 1.302451
703 141.1875 1.0 1.0 -2.738399 0.391899 -2.661030 1.356126 -2.788975 1.321684 -2.916921 1.287243
704 141.6875 2.0 2.0 -2.718655 0.367354 -2.652842 1.325709 -2.780787 1.291267 -2.908733 1.256825
705 141.9375 1.0 1.0 -2.708891 0.354996 -2.648748 1.310500 -2.776693 1.276059 -2.904639 1.241617
706 142.1875 1.0 1.0 -2.699181 0.342595 -2.644654 1.295292 -2.772599 1.260850 -2.900545 1.226408
707 142.4375 1.0 2.0 -2.684697 0.323931 -2.637374 1.264649 -2.767140 1.237868 -2.896905 1.211087
708 142.5625 2.0 1.0 -2.670295 0.305204 -2.633734 1.249328 -2.761680 1.214886 -2.889625 1.180444
709 142.6250 2.0 2.0 -2.651146 0.280192 -2.625546 1.218911 -2.753492 1.184469 -2.881437 1.150027
710 142.8125 1.0 1.0 -2.641585 0.267676 -2.621452 1.203702 -2.749398 1.169260 -2.877343 1.134819
711 143.1250 1.0 1.0 -2.632065 0.255128 -2.617358 1.188493 -2.745304 1.154052 -2.873249 1.119610
712 143.3125 2.0 2.0 -2.613163 0.229930 -2.609170 1.158076 -2.737116 1.123635 -2.865061 1.089193
713 143.5000 1.0 1.0 -2.603753 0.217300 -2.605076 1.142868 -2.733022 1.108426 -2.860967 1.073984
714 143.6875 1.0 2.0 -2.589701 0.198309 -2.597797 1.112225 -2.727562 1.085444 -2.857327 1.058663
715 144.0000 2.0 2.0 -2.571047 0.172926 -2.591430 1.081375 -2.721195 1.054594 -2.850960 1.027813
716 144.4375 2.0 1.0 -2.557160 0.153813 -2.587790 1.066054 -2.715735 1.031612 -2.843681 0.997170
717 145.0000 1.0 2.0 -2.543420 0.134595 -2.580511 1.035411 -2.710276 1.008630 -2.840041 0.981849
718 146.3750 1.0 1.0 -2.534386 0.121693 -2.577327 1.019987 -2.707092 0.993205 -2.836858 0.966424

719 rows × 11 columns


In [283]:
plt.figure(figsize=(12,12))
plt.axis('equal')
plt.plot(df.x, df.y)


Out[283]:
[<matplotlib.lines.Line2D at 0x1113e645710>]

Apply images

We can use the positions to composite the undistorted, warped images onto a map image. For now, don't worry about size and just paste a smaller portion.

Future:

  • establish a depth buffer so that portions captured when the camera is closer override portions captured from farther away
  • a method to weight multiple image captures together regardless of depth (combine with the above somehow)
  • image alignment to help with localization (i.e., include in a Kalman filter with the encoders, etc. for the position estimate)
  • path optimization and other uses
  • a Donkey part version of this
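
The trickiest part of the loop below is clipping the paste window at the map edges while keeping the map and patch windows the same size. A self-contained sketch of that index arithmetic (paste_patch is a hypothetical helper, not used in the loop; it combines with np.maximum like the commented-out alternative, which avoids the uint8 wrap-around the += variant can hit):


In [ ]:
# Sketch: paste a patch centered at (x, y) onto map_img, clipping both the
# destination window and the matching source window at the map edges.
def paste_patch(map_img, patch, x, y):
    half_w, half_h = patch.shape[1] // 2, patch.shape[0] // 2
    map_h, map_w = map_img.shape[:2]
    mx1 = np.clip(x - half_w, 0, map_w - 1)
    mx2 = np.clip(x + half_w, 0, map_w - 1)
    my1 = np.clip(y - half_h, 0, map_h - 1)
    my2 = np.clip(y + half_h, 0, map_h - 1)
    px1, px2 = half_w - (x - mx1), half_w + (mx2 - x)
    py1, py2 = half_h - (y - my1), half_h + (my2 - y)
    map_img[my1:my2, mx1:mx2] = np.maximum(map_img[my1:my2, mx1:mx2],
                                           patch[py1:py2, px1:px2])
    return map_img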

In [291]:
Rect = namedtuple('Rect', 'x1 y1 x2 y2')

In [297]:
warped_size = np.array([700, 700])
persp_size = warped_size * [1, 2]
persp_center = persp_size // 2
print(warped_size)
print(persp_size)
print(persp_center)


[700 700]
[ 700 1400]
[350 700]

In [307]:
np.clip(-10, 1, 20)


Out[307]:
1

In [442]:
map_size = 1200, 1500
map_img = np.zeros((map_size[1], map_size[0], 3), dtype=np.uint8)
h2 = 50
w2 = 50
n = int(df.shape[0]/2)
warped_size = 700, 700
persp_size = warped_size[0], warped_size[1] * 2
persp_half = persp_size[0] // 2, persp_size[1] // 2
persp_rot_center = (321, 699)
for i in range(0, n, 100):
    rows,cols,_ = warped.shape
    #print(df.iloc[i,:])
    r = df.iloc[i,:]
    img = videodata[int(r.frame),:,:,:]
    undist_img = fe.undistort(img, undistorted_size=(640, 480))
    warped = cv2.warpPerspective(undist_img, M2, warped_size)
    M = cv2.getRotationMatrix2D(persp_rot_center,-int(df.iloc[i,:].orientation_x),1)
    persp = cv2.warpAffine(warped,M,persp_size)
    x, y = int(r.px)+25, int(r.py)
    # Clip the destination window to the map bounds...
    mx1 = np.clip(x - persp_half[0], 0, map_size[0]-1)
    mx2 = np.clip(x + persp_half[0], 0, map_size[0]-1)
    my1 = np.clip(y - persp_half[1], 0, map_size[1]-1)
    my2 = np.clip(y + persp_half[1], 0, map_size[1]-1)
    # ...and take the matching window out of the rotated perspective image.
    px1 = persp_half[0] - (x - mx1)
    px2 = persp_half[0] + (mx2 - x)
    py1 = persp_half[1] - (y - my1)
    py2 = persp_half[1] + (my2 - y)
    try:
        #map_img[my1:my2,mx1:mx2,:] = np.maximum(map_img[my1:my2,mx1:mx2,:], persp[py1:py2,px1:px2,:])
        map_img[my1:my2,mx1:mx2,:] += persp[py1:py2,px1:px2,:] // 1
    except Exception as e:
        print(e)
plt.figure(figsize=(10,10))
plt.imshow(map_img)
plt.plot(df.px[0:n], df.py[0:n], df.pwx[0:n], df.pwy[0:n])


Out[442]:
[<matplotlib.lines.Line2D at 0x111a53ad780>,
 <matplotlib.lines.Line2D at 0x111a53ada20>]

Simple demo image


In [266]:
i = 200
img = videodata[int(df.iloc[i,:].frame),:,:,:]
undist_img = fe.undistort(img, undistorted_size=(640, 480))
warped = cv2.warpPerspective(undist_img, M2, (700,700))
plt.figure(figsize=(12,12))
plt.subplot(2,2,1)
plt.imshow(img)
plt.subplot(2,2,2)
plt.imshow(undist_img)
plt.subplot(2,2,3)
plt.imshow(warped)
plt.subplot(2,2,4)
plt.imshow(map_img)
plt.plot(df.x[0:n], df.y[0:n])


Out[266]:
[<matplotlib.lines.Line2D at 0x1114cc91198>]

In [ ]: