Tags: c++, opencv, ransac, visual-odometry

OpenCV: both RANSAC and LMedS produce an essential matrix of size 0


I was trying to use the findEssentialMat function to produce an essential matrix and kept getting an empty matrix back, even with very low probability and high threshold values. I wrote a reproducible example that tries to compute the essential matrix from a single still image (the frame is tracked against itself), and I still get no essential matrix. I'm not sure why this is happening.

Code:

#include <opencv2/opencv.hpp>
#include <iostream>
#include <vector>

using namespace cv;

// Forward declarations (definitions below main)
std::vector<Point2f> featureDetection(const Mat& imgIn, std::vector<KeyPoint>& pointsOut, int threshold);
void featureTracking(const Mat& img_1, const Mat& img_2, std::vector<Point2f>& points1, std::vector<Point2f>& points2, std::vector<uchar>& status);

int main(int argc, char** argv) {

    Mat in = imread("test.jpg", IMREAD_GRAYSCALE);

    std::vector<KeyPoint> keypoints;
    std::vector<Point2f> points;
    std::vector<Point2f> prevPoints;
    std::vector<uchar> status;

    points = featureDetection(in, keypoints, 30);
    prevPoints = std::vector<Point2f>(points);

    double focal = 0;
    Point2d opticalCenter(in.rows / 2, in.cols / 2);
    
    // Track features
    featureTracking(in, in, points, prevPoints, status);

    // FIXME RANSAC algorithm not working. Try LMEDS?
    Mat E, mask;
    E = findEssentialMat(points, prevPoints, focal, opticalCenter, RANSAC, 0.001, 100.0, mask);
    Mat R, t;
    if(E.size().area() == 0) {
        std::cout << mask.size().area() << " points, essential matrix is empty\n";
    } else {
        recoverPose(E, points, prevPoints, R, t, focal, opticalCenter, mask);
    }

    // Draw tracked features (this frame)
    for(int i = 0; i < prevPoints.size(); i++) {
        // Tracking lines
        line(in, points[i], prevPoints[i], Scalar(0, 100, 0), 5, LineTypes::LINE_4);
    }  

    // Show output
    imshow("Data", in);
    waitKey(0);

    imwrite("out.jpg", in);

    return 0;
}

std::vector<Point2f> featureDetection(const Mat& imgIn, std::vector<KeyPoint>& pointsOut, int threshold) {
    bool nonmaxSuppression = true;
    FAST(imgIn, pointsOut, threshold, nonmaxSuppression);
    std::vector<Point2f> points(0);
    for(KeyPoint p : pointsOut) {
        points.push_back(p.pt);
    }
    return points;
}
void featureTracking(const Mat& img_1, const Mat& img_2, std::vector<Point2f>& points1, std::vector<Point2f>& points2, std::vector<uchar>& status)  { 

    //this function automatically gets rid of points for which tracking fails

    std::vector<float> err;                 
    Size winSize=Size(21,21);                                                                                               
    TermCriteria termcrit=TermCriteria(TermCriteria::COUNT+TermCriteria::EPS, 30, 0.01);

    cv::calcOpticalFlowPyrLK(img_1, img_2, points1, points2, status, err, winSize, 3, termcrit, 0, 0.001);

    //getting rid of points for which the KLT tracking failed or those who have gone outside the frame
    int indexCorrection = 0;
    for( int i=0; i<status.size(); i++) {
        Point2f pt = points2.at(i- indexCorrection);
        if ((status.at(i) == 0)||(pt.x<0)||(pt.y<0)) {
            if((pt.x<0)||(pt.y<0))  {
                status.at(i) = 0;
            }
            points1.erase (points1.begin() + i - indexCorrection);
            points2.erase (points2.begin() + i - indexCorrection);
            indexCorrection++;
        }
    }
}

Input:

[Image: Ball]

Output (Markers denoted by *):

[Image: Ball with markers]

I'm using OpenCV 4.5.4 built for MinGW.


Solution

  • It turns out that findEssentialMat does not work with a focal length of 0; setting it to 1 fixed the issue. A zero focal length presumably makes the camera matrix implied by the focal/opticalCenter parameters singular, so the point correspondences cannot be normalized and no model can be estimated.
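
A minimal sketch of the corrected call, reusing the variable names from the question. The non-zero focal length is the actual fix; the more conventional RANSAC confidence (~0.999), the 1-pixel threshold, and writing the principal point as (cols/2, rows/2) are my own adjustments, not part of the accepted answer. Ideally both focal and opticalCenter would come from camera calibration.

    // A focal length of 0 makes the implied camera matrix singular, so use a
    // non-zero value. 1.0 unblocks the function; the calibrated focal length
    // (in pixels) gives geometrically meaningful results.
    double focal = 1.0;
    Point2d opticalCenter(in.cols / 2.0, in.rows / 2.0);  // principal point as (x, y)

    Mat E, mask;
    E = findEssentialMat(points, prevPoints, focal, opticalCenter,
                         RANSAC,
                         0.999,   // confidence that the estimate is outlier-free
                         1.0,     // max point-to-epipolar-line distance in pixels
                         mask);

    Mat R, t;
    if (!E.empty()) {
        recoverPose(E, points, prevPoints, R, t, focal, opticalCenter, mask);
    }

Alternatively, the overload that takes a full camera matrix, findEssentialMat(points, prevPoints, K, RANSAC, prob, threshold, mask), makes the intrinsics explicit and avoids this kind of degenerate default.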