python, opencv, affinetransform, homography, image-registration

Extracting rotation-free translational information out of a transformation matrix?


I use the SIFT feature detector and need the angle AND the center translation between two images related by a rigid 2D transform. Therefore I want to extract that information directly from the transformation matrix.

The idea is to build a rotation matrix from the angle I get out of the image registration and then extract the translation matrix backwards via matrix multiplication; a short sketch of the algebra follows. I think it should work, but some small thing is missing. The goal of the minimum working example below is to recover the initially set parameters (tx, ty, angle) from the image registration.
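
For intuition, here is that algebra in isolation (a minimal standalone sketch with made-up numbers; T and R are stand-ins for trans_mat and rot_mat in the example): if the test image is generated with M = T @ R, then right-multiplying by the inverse of the rotation isolates the translation, T = M @ inv(R).

import numpy as np

theta = np.deg2rad(60)
R = np.array([[ np.cos(theta), np.sin(theta), 0],
              [-np.sin(theta), np.cos(theta), 0],
              [ 0,             0,             1]])   # pure rotation
T = np.array([[1, 0, 10],
              [0, 1, 20],
              [0, 0,  1]], dtype=float)              # pure translation
M = T @ R
print(np.allclose(M @ np.linalg.inv(R), T))          # True: translation recovered

The full minimum working example: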

import numpy as np
import cv2 as cv
import cv2
import matplotlib.pyplot as plt
import math

img=cv2.imread(r"\lena_std.tif",cv2.IMREAD_GRAYSCALE)

tx=0 # parameters to play with, which the code should fully recover
ty=0
angle=60

rows,cols= img.shape

rot_mat = cv2.getRotationMatrix2D((cols/2,rows/2),angle,1)
rot_mat=np.vstack([rot_mat, np.array([0,0,1])])
trans_mat = np.array([
    [1, 0, tx],
    [0, 1, ty],
    [0, 0, 1]
], dtype=np.float32)
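# composition order: M = trans_mat @ rot_mat rotates about the image centre first, then translates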
M=trans_mat @ rot_mat

reference_image = img
test_image = cv2.warpAffine(img,M[:2,:3],(cols,rows))

def similarity_sift_flann_affine2D(reference_image, test_image):

    img1=test_image.astype(np.uint8)
    img2=reference_image.astype(np.uint8)

    # Initiate SIFT detector
    sift = cv.SIFT_create()
    # find the keypoints and descriptors with SIFT
    kp1, des1 = sift.detectAndCompute(img1,None)
    kp2, des2 = sift.detectAndCompute(img2,None)
    FLANN_INDEX_KDTREE = 1
    index_params = dict(algorithm = FLANN_INDEX_KDTREE, trees = 5)
    search_params = dict(checks = 50)
    flann = cv.FlannBasedMatcher(index_params, search_params)
    matches = flann.knnMatch(des1,des2,k=2)
    # store all the good matches as per Lowe's ratio test.
    good = []
    for m,n in matches:
        if m.distance < 0.7*n.distance:     # Lowe's ratio test
            good.append(m)
        
    MIN_MATCH_COUNT=10
        
    if len(good)>=MIN_MATCH_COUNT:
        src_pts = np.float32([ kp1[m.queryIdx].pt for m in good ]).reshape(-1,1,2)
        dst_pts = np.float32([ kp2[m.trainIdx].pt for m in good ]).reshape(-1,1,2)
        # estimateAffinePartial2D fits a 4-DOF transform (rotation, uniform
        # scale, translation) mapping src_pts to dst_pts, i.e. test -> reference
        M, mask = cv2.estimateAffinePartial2D(src_pts, dst_pts)
        #M, mask = cv.estimateAffine2D(src_pts, dst_pts)  # full 6-DOF affine alternative
    
    else:
        print("Not enough matches are found - {}/{}".format(len(good), MIN_MATCH_COUNT))
        M = None  # no estimate available
        
    return M

M1=similarity_sift_flann_affine2D(reference_image, test_image)
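# note: M1 maps test -> reference, i.e. it estimates (approximately) the inverse of M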

sizeImg1=img.shape
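# polar decomposition via SVD: u @ vh is the rotation closest to the 2x2
# linear part, from which the angle can be read off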
u, _, vh = np.linalg.svd(M[0:2, 0:2])   
R = u @ vh
angle2 = math.atan2(R[1,0], R[0,0])
angle2 =np.rad2deg(angle2)

cor_rot_mat = cv2.getRotationMatrix2D((cols/2,rows/2),-angle2,1)
cor_rot_mat=np.vstack([cor_rot_mat, np.array([0,0,1])])

M1=np.vstack([M1, np.array([0,0,1])])
#cor_trans_mat= np.linalg.inv(cor_rot_mat) @ M1
cor_trans_mat= M1 @ np.linalg.inv(cor_rot_mat)

print("angle diff", str(np.abs(angle2)-np.abs(angle)))
print("tx diff", str(np.abs(tx)-np.abs(cor_trans_mat[0][2])))
print("ty diff", str(np.abs(ty)-np.abs(cor_trans_mat[1][2])))

Can someone help? Many thanks!


Solution

  • I found the solution; the theory behind my code was correct. The errors were the sign of angle2 and having the correct matrix multiplication commented out (the order of the product matters). A short sanity check follows the code.

    This code works:

    import numpy as np
    import cv2 as cv
    import cv2
    import matplotlib.pyplot as plt
    import math
    
    img=cv2.imread(r"\lena_std.tif",cv2.IMREAD_GRAYSCALE)
    
    tx=36 # parameters to play with, which the code should fully recover
    ty=120
    angle=30
    
    rows,cols= img.shape
    
    rot_mat = cv2.getRotationMatrix2D((cols/2,rows/2),angle,1)
    rot_mat=np.vstack([rot_mat, np.array([0,0,1])])
    trans_mat = np.array([
        [1, 0, tx],
        [0, 1, ty],
        [0, 0, 1]
    ], dtype=np.float32)
    M=trans_mat @ rot_mat
    
    reference_image = img
    test_image = cv2.warpAffine(img,M[:2,:3],(cols,rows))
    
    def similarity_sift_flann_affine2D(reference_image, test_image):
    
        img1=test_image.astype(np.uint8)
        img2=reference_image.astype(np.uint8)
    
        # Initiate SIFT detector
        sift = cv.SIFT_create()
        # find the keypoints and descriptors with SIFT
        kp1, des1 = sift.detectAndCompute(img1,None)
        kp2, des2 = sift.detectAndCompute(img2,None)
        FLANN_INDEX_KDTREE = 1
        index_params = dict(algorithm = FLANN_INDEX_KDTREE, trees = 5)
        search_params = dict(checks = 50)
        flann = cv.FlannBasedMatcher(index_params, search_params)
        matches = flann.knnMatch(des1,des2,k=2)
        # store all the good matches as per Lowe's ratio test.
        good = []
        for m,n in matches:
            if m.distance < 0.7*n.distance:     # Lowe's ratio test
                good.append(m)
        
        MIN_MATCH_COUNT=10
        
        if len(good)>=MIN_MATCH_COUNT:
            src_pts = np.float32([ kp1[m.queryIdx].pt for m in good ]).reshape(-1,1,2)
            dst_pts = np.float32([ kp2[m.trainIdx].pt for m in good ]).reshape(-1,1,2)
            M, mask = cv2.estimateAffinePartial2D(src_pts, dst_pts)
            #M,mask = cv.estimateAffine2D(src_pts, dst_pts)
    
        else:
            print("Not enough matches are found - {}/{}".format(len(good), MIN_MATCH_COUNT))
            M = None  # no estimate available
        
        return M
    
    M1=similarity_sift_flann_affine2D(reference_image, test_image)
    
    sizeImg1=img.shape
    u, _, vh = np.linalg.svd(M[0:2, 0:2])   
    R = u @ vh
    angle2 = math.atan2(R[1,0], R[0,0])
    angle2 =np.rad2deg(angle2)
    
    cor_rot_mat = cv2.getRotationMatrix2D((cols/2,rows/2),angle2,1)
    cor_rot_mat=np.vstack([cor_rot_mat, np.array([0,0,1])])
    
    M1=np.vstack([M1, np.array([0,0,1])])
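    # left-multiplying by the inverse rotation strips it off; what remains is a
    # pure translation (with flipped signs, since M1 estimates the inverse mapping)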
    cor_trans_mat= np.linalg.inv(cor_rot_mat) @ M1
    
    print("angle diff", str(np.abs(angle2)-np.abs(angle)))
    print("tx diff", str(np.abs(tx)-np.abs(cor_trans_mat[0][2])))
    print("ty diff", str(np.abs(ty)-np.abs(cor_trans_mat[1][2])))