In [1]:
%matplotlib inline
import numpy as np
import cv2
import matplotlib.pyplot as plt
import os
import math
from mpl_toolkits.mplot3d import Axes3D
# We consider keypoints from the same image, but from different matches, equal
# if they are within MATCH_TOLERANCE_RAY (5 px) of each other:
# kp1[i] from orb(img1, img2) and kp1[j] from orb(img1, img3), both in img1,
# are equal if kp1[i] and kp1[j] are less than MATCH_TOLERANCE_RAY apart.
MATCH_TOLERANCE_RAY = 5
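# A quick illustration of the rule with hypothetical coordinates: points about
# 4.24 px apart fall inside the tolerance, points exactly 5 px apart do not.
assert math.hypot(3, 3) < MATCH_TOLERANCE_RAY
assert not (math.hypot(3, 4) < MATCH_TOLERANCE_RAY)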
# Image plot function
def plotImage(title, image):
    plt.figure(figsize=(10, 10))
    plt.imshow(image)
    plt.title(title)
    plt.axis('off')
    plt.show()
# ORB feature detection and matching between two images
def Orb_descript(imgA, imgB, title):
    orb = cv2.ORB_create()
    resultsImg = imgB.copy()
    kpA, desA = orb.detectAndCompute(imgA, None)
    kpB, desB = orb.detectAndCompute(imgB, None)
    bf = cv2.BFMatcher(cv2.NORM_HAMMING)
    matches = bf.knnMatch(desA, desB, k=2)
    good = []
    U = []
    V = []
    for m, n in matches:
        if m.distance < 0.75 * n.distance:  # Lowe's ratio test
            good.append(m)
            # queryIdx indexes kpA (the query image), trainIdx indexes kpB
            U.append([kpA[m.queryIdx].pt[0], kpB[m.trainIdx].pt[0]])
            V.append([kpA[m.queryIdx].pt[1], kpB[m.trainIdx].pt[1]])
    src_pts = np.float32([kpA[m.queryIdx].pt for m in good]).reshape(-1, 1, 2)
    dst_pts = np.float32([kpB[m.trainIdx].pt for m in good]).reshape(-1, 1, 2)
    M, mask = cv2.findHomography(src_pts, dst_pts, cv2.RANSAC, 5.0)
    matchesMask = mask.ravel().tolist()
    h, w = imgA.shape
    pts = np.float32([[0, 0], [0, h - 1], [w - 1, h - 1], [w - 1, 0]]).reshape(-1, 1, 2)
    dst = cv2.perspectiveTransform(pts, M)
    resultsImg = cv2.polylines(resultsImg, [np.int32(dst)], True, 255, 3, cv2.LINE_AA)
    draw_params = dict(matchColor=(0, 255, 128),  # draw matches in green
                       singlePointColor=(0, 255, 128),
                       matchesMask=matchesMask,   # draw only inliers
                       flags=2)
    img3 = cv2.drawMatches(imgA, kpA, resultsImg, kpB, good, None, **draw_params)
    plotImage(title, img3)
    return np.array(U), np.array(V)
# Centre W by subtracting each frame's centroid (each column of W holds one
# frame's stacked coordinates), as required before the factorization
def centralizeW(W):
    return W - W.mean(axis=0)
# Euclidean distance between two points (tuples: (x, y))
def classicEuclidian(p1, p2):
    return math.sqrt((p1[0] - p2[0]) ** 2 + (p1[1] - p2[1]) ** 2)
# Find equivalent keypoints between two (U, V) matrix pairs, using a fixed
# image as reference (referencePos is the column holding that image's
# coordinates; it must be the same column in both pairs)
def findEquivalentKeyPoints(Um1, Vm1, Um2, Vm2, referencePos):
    matchedU = []
    matchedV = []
    for i in range(Um1.shape[0]):
        kp1 = (Um1[i][referencePos], Vm1[i][referencePos])
        for j in range(Um2.shape[0]):
            kp2 = (Um2[j][referencePos], Vm2[j][referencePos])
            if classicEuclidian(kp1, kp2) < MATCH_TOLERANCE_RAY:
                matchedU.append(Um1[i])
                matchedV.append(Vm1[i])
                break
    return np.array(matchedU), np.array(matchedV)
# Find equivalent keypoints between two (U, V) matrix pairs, using a fixed
# image as reference (referencePos, as above), and merge the Us and Vs for the
# keypoints both pairs share.
# Example: from (image 1, image 2) and (image 1, image 3), this builds
# U[:, 0:3] and V[:, 0:3] for the keypoints of image 1 present in both pairs.
def findEquivalentKeyPointsAndMerge(Um1, Vm1, Um2, Vm2, referencePos):
    matchedU = []
    matchedV = []
    toMerge = 0  # the column of Um2/Vm2 that is not referencePos
    if referencePos == 0:
        toMerge = 1
    for i in range(Um1.shape[0]):
        kp1 = (Um1[i][referencePos], Vm1[i][referencePos])
        for j in range(Um2.shape[0]):
            kp2 = (Um2[j][referencePos], Vm2[j][referencePos])
            if classicEuclidian(kp1, kp2) < MATCH_TOLERANCE_RAY:
                matchedU.append((Um1[i][0], Um1[i][1], Um2[j][toMerge]))
                matchedV.append((Vm1[i][0], Vm1[i][1], Vm2[j][toMerge]))
                break
    return np.array(matchedU), np.array(matchedV)
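# Illustrative check with made-up coordinates (not from the images): the point
# (10, 20), seen at reference position 0 in both pairs, gets its third
# u/v coordinate merged in from the second pair.
_demoU1 = np.array([[10.0, 15.0]])
_demoV1 = np.array([[20.0, 25.0]])
_demoU2 = np.array([[10.0, 18.0]])
_demoV2 = np.array([[20.0, 28.0]])
_mU, _mV = findEquivalentKeyPointsAndMerge(_demoU1, _demoV1, _demoU2, _demoV2, 0)
print("Merged demo U:", _mU)  # expected: [[10. 15. 18.]]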
# Draw 3D plot of the structure points (red) and the cameras (blue)
def showPointCloud(R, S):
    num_pts = R.shape[0] + S.shape[0]
    px = np.zeros(num_pts, dtype=np.float64)
    py = np.zeros(num_pts, dtype=np.float64)
    pz = np.zeros(num_pts, dtype=np.float64)
    colors = []
    idx = 0
    for i in S:  # points of the cloud
        px[idx] = i[0]
        py[idx] = i[1]
        pz[idx] = i[2]
        colors.append('r')  # red
        idx += 1
    for i in R:  # cameras
        px[idx] = i[0]
        py[idx] = i[1]
        pz[idx] = i[2]
        colors.append('b')  # blue
        idx += 1
    fig = plt.figure()
    ax = fig.add_subplot(111, projection='3d')
    ax.set_xlabel('X')
    ax.set_ylabel('Y')
    ax.set_zlabel('Z')
    ax.scatter(px, py, pz, c=colors)
    plt.show()
# BEGINS ----------------------------------------------------------------------
img1 = cv2.imread('img1.jpg', 0)
img2 = cv2.imread('img2.jpg', 0)
img3 = cv2.imread('img3.jpg', 0)
U1, V1 = Orb_descript(img2, img1, "Images 1 and 2 matches")
U2, V2 = Orb_descript(img2, img3, "Images 2 and 3 matches")
U3, V3 = Orb_descript(img1, img3, "Images 1 and 3 matches")
U23, V23 = findEquivalentKeyPoints(U2, V2, U3, V3, 1)  # equivalents in the Us and Vs, image 3 as reference
U123, V123 = findEquivalentKeyPointsAndMerge(U1, V1, U23, V23, 0)  # equivalents with image 2 as reference, merged
W = np.concatenate((U123, V123), axis=0)
print("Shape of U: ", U123.shape)
print("Shape of V: ", V123.shape)
print("Shape of W: ", W.shape)
W = centralizeW(W)
U, s, Vt = np.linalg.svd(W, full_matrices=True)
print("Shape of SVD(W): ", (U.shape, s.shape, Vt.shape))
# rank-3 truncation: first three singular values and vectors
Ur = U[:, 0:3]
Vr = Vt[0:3, :]
Dr = np.diag(s[0:3])
S = np.dot(Ur, np.sqrt(Dr))  # structure: one 3D point per row
R = np.dot(np.sqrt(Dr), Vr)  # motion: one camera per row
print("Shape of S: ", S.shape)
print("Shape of R: ", R.shape)
print("Final cloud for ORB:")
showPointCloud(R, S)
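As a quick sanity check on the factorization (a sketch using the S, R, and W computed in the cell above): the centred W has only three columns, so the rank-3 product S·R should reproduce it almost exactly.
In [ ]:
# Verify the factorization: S @ R is the rank-3 reconstruction of the centred
# measurement matrix W, so the residual should be near machine precision.
print("Reconstruction error:", np.linalg.norm(W - np.dot(S, R)))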
Harris corner detection and matching
In [3]:
from scipy import ndimage

def compute_harris_points(img, sigma=2):
    # x and y derivatives of a Gaussian-smoothed image; float output arrays
    # avoid uint8 wrap-around on the negative derivative values
    imx = np.zeros(img.shape)
    ndimage.gaussian_filter(img, (sigma, sigma), (0, 1), imx)
    imy = np.zeros(img.shape)
    ndimage.gaussian_filter(img, (sigma, sigma), (1, 0), imy)
    # compute the products of derivatives at every pixel
    Sxx = ndimage.gaussian_filter(imx * imx, sigma)
    Sxy = ndimage.gaussian_filter(imx * imy, sigma)
    Syy = ndimage.gaussian_filter(imy * imy, sigma)
    # determinant and trace of the structure tensor
    Mdet = Sxx * Syy - Sxy ** 2
    Mtr = Sxx + Syy
    harris = np.divide(Mdet, Mtr)
    harris[~np.isfinite(harris)] = 0
    return harris
def doHarrisNonMaxSupression(harrisim, min_dist=10, threshold=0.1):
    # Return corners from a Harris response image. min_dist is the minimum
    # number of pixels separating corners and the image boundary.
    global t
    global dist
    dist = min_dist
    t = threshold
    # find top corner candidates above a threshold
    corner_threshold = harrisim.max() * threshold
    harrisim_t = (harrisim > corner_threshold) * 1
    # get coordinates of candidates
    coords = np.array(harrisim_t.nonzero()).T
    # ...and their values
    candidate_values = [harrisim[c[0], c[1]] for c in coords]
    # sort candidates
    index = np.argsort(candidate_values)
    # store allowed point locations in array
    allowed_locations = np.zeros(harrisim.shape)
    allowed_locations[min_dist:-min_dist, min_dist:-min_dist] = 1
    # select the best points taking min_dist into account
    filtered_coords = []
    for i in index:
        if allowed_locations[coords[i, 0], coords[i, 1]] == 1:
            filtered_coords.append(coords[i])
            allowed_locations[(coords[i, 0] - min_dist):(coords[i, 0] + min_dist),
                              (coords[i, 1] - min_dist):(coords[i, 1] + min_dist)] = 0
    return filtered_coords
def plot_harris_points(image, filtered_coords):
    # Plot the corners found in an image
    plt.figure()
    plt.gray()
    plt.imshow(image)
    plt.title('Harris corner detection, dist=%s and threshold=%s' % (dist, t))
    plt.plot([p[1] for p in filtered_coords], [p[0] for p in filtered_coords], '*', color='r')
    plt.axis('off')
    plt.show()
def get_descriptors(image, filtered_coords, wid=5):
    # Return flattened (2*wid+1)x(2*wid+1) pixel patches around each corner
    desc = []
    for coords in filtered_coords:
        patch = image[coords[0] - wid:coords[0] + wid + 1,
                      coords[1] - wid:coords[1] + wid + 1].flatten()
        desc.append(patch)
    return desc
def match(desc1, desc2, threshold=0.5):
    # Match patch descriptors by normalized cross-correlation (NCC)
    n = len(desc1[0])
    # pair-wise distances
    d = -np.ones((len(desc1), len(desc2)))
    for i in range(len(desc1)):
        for j in range(len(desc2)):
            d1 = (desc1[i] - np.mean(desc1[i])) / np.std(desc1[i])
            d2 = (desc2[j] - np.mean(desc2[j])) / np.std(desc2[j])
            ncc_value = np.sum(d1 * d2) / (n - 1)
            if ncc_value > threshold:
                d[i, j] = ncc_value
    ndx = np.argsort(-d)
    matchscores = ndx[:, 0]
    return matchscores
def match_twosided(desc1, desc2, threshold=0.5):
    matches_12 = match(desc1, desc2, threshold)
    matches_21 = match(desc2, desc1, threshold)
    ndx_12 = np.where(matches_12 >= 0)[0]
    # remove matches that are not symmetric
    for n in ndx_12:
        if matches_21[matches_12[n]] != n:
            matches_12[n] = -1
    return matches_12
def appendimages(im1, im2):
    # select the image with the fewest rows and pad it with empty rows
    rows1 = im1.shape[0]
    rows2 = im2.shape[0]
    if rows1 < rows2:
        im1 = np.concatenate((im1, np.zeros((rows2 - rows1, im1.shape[1]))), axis=0)
    elif rows1 > rows2:
        im2 = np.concatenate((im2, np.zeros((rows1 - rows2, im2.shape[1]))), axis=0)
    # otherwise the row counts are equal and no padding is needed
    return np.concatenate((im1, im2), axis=1)
def plot_matches(im1, im2, locs1, locs2, matchscores, show_below=True):
    # Show the two images side by side with lines joining matched corners
    im3 = appendimages(im1, im2)
    if show_below:
        im3 = np.vstack((im3, im3))
    plt.imshow(im3)
    cols1 = im1.shape[1]
    for i, m in enumerate(matchscores):
        if m > 0:
            plt.plot([locs1[i][1], locs2[m][1] + cols1], [locs1[i][0], locs2[m][0]], 'c')
    plt.axis('off')
wid = 5
gray1_1 = img1
gray2_2 = img2
harrisim1 = compute_harris_points(gray1_1)
filtered_coords1 = doHarrisNonMaxSupression(harrisim1)
#plot_harris_points(gray1_1, filtered_coords1)
d1 = get_descriptors(gray1_1, filtered_coords1, wid)
harrisim2 = compute_harris_points(gray2_2)
filtered_coords2 = doHarrisNonMaxSupression(harrisim2)
#plot_harris_points(gray2_2, filtered_coords2)
d2 = get_descriptors(gray2_2, filtered_coords2, wid)
matches = match_twosided(d1, d2)
plt.figure(figsize=(12, 12))
plt.gray()
plot_matches(gray1_1, gray2_2, filtered_coords1, filtered_coords2, matches)
plt.show()
# keep only the symmetric matches and estimate the fundamental matrix from the
# corresponding corner pairs (coordinates swapped from (row, col) to (x, y))
pairs = [(i, m) for i, m in enumerate(matches) if m >= 0]
pts11 = np.float32([filtered_coords1[i][::-1] for i, m in pairs]).reshape(-1, 1, 2)
pts22 = np.float32([filtered_coords2[m][::-1] for i, m in pairs]).reshape(-1, 1, 2)
F1, mask1 = cv2.findFundamentalMat(pts11, pts22, cv2.FM_RANSAC, 5.0)
print(F1)
U = pts11
V = pts22
#########
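One way to sanity-check the estimated fundamental matrix (a minimal sketch, assuming the pts11, pts22, mask1, and F1 computed in the cell above) is the epipolar constraint: x2^T F x1 should be close to zero for the RANSAC inliers.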
In [ ]:
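# Epipolar residuals |x2^T F x1| for the RANSAC inliers; these should be small
# if F1 is a reasonable estimate (guard against RANSAC failing entirely).
if F1 is not None:
    inl1 = pts11[mask1.ravel() == 1].reshape(-1, 2)
    inl2 = pts22[mask1.ravel() == 1].reshape(-1, 2)
    h1 = np.hstack((inl1, np.ones((inl1.shape[0], 1))))  # homogeneous (x, y, 1)
    h2 = np.hstack((inl2, np.ones((inl2.shape[0], 1))))
    residuals = np.abs(np.sum(h2 * np.dot(h1, F1.T), axis=1))
    print("Mean epipolar residual:", residuals.mean())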