天天看點

基于ORB特征點比對的對極幾何限制實作(源碼+講解)

//前幾篇部落格說了雙目校正回顧一下代碼

// Stereo calibration: from object points and the left/right image points, solve for each
// camera's intrinsics/distortion refinement and the relative rotation R_total / translation
// T_total between the two cameras, plus the essential matrix E and fundamental matrix F.
stereoCalibrate(objectPoints, image_leftPoints,  image_rightPoints, intrinsic_left, distCoeffs_left, intrinsic_right, distCoeffs_right, s1, R_total, T_total, E, F);
		printf("Starting Rectification\n");
// Stereo rectification: split the inter-camera R_total/T_total into per-camera rectifying
// rotations R_L/R_R and projection matrices P1/P2 (Q is the disparity-to-depth mapping);
// CALIB_ZERO_DISPARITY keeps the principal points at the same pixel coordinates.
stereoRectify(intrinsic_left, distCoeffs_left, intrinsic_right, distCoeffs_right, s1, R_total, T_total, R_L, R_R, P1, P2, Q, CALIB_ZERO_DISPARITY, -1, s1, &validROIL, &validROIR);
           
基于ORB特征點比對的對極幾何限制實作(源碼+講解)
基于ORB特征點比對的對極幾何限制實作(源碼+講解)

//這種校正方式,實作通過物點像點内參以及畸變矩陣求得兩個相機相對的R,T

//再将R,T拆分成左右相機的r,t

//參考SLAM十四講的解法,思路是先求出ORB比對點(我上一篇部落格有源碼),通過比對點,焦距f,光軸與成像面的交點坐标求出E,SVD方法分解E求出兩個相機相對的R,T

//-- Compute the essential matrix from the matched point pairs.
//   Bug fix: the principal point was written (3251, 249.7); the calibrated
//   value (matching cx in the intrinsic matrix K used elsewhere) is (325.1, 249.7).
        Point2d principal_point(325.1, 249.7);  // camera optical center (calibrated value)
        double focal_length = 521;              // camera focal length (calibrated value)
        Mat essential_matrix;
        essential_matrix = findEssentialMat(points1, points2, focal_length, principal_point);
        cout << "essential_matrix is" << endl << essential_matrix << endl;
        //-- Recover the rotation R and translation t from the essential matrix (SVD decomposition).
        recoverPose(essential_matrix, points1, points2, R, t, focal_length, principal_point);
        cout << "R :" << R << endl;
        cout << "t is" << t << endl;
           

//先求ORB比對點

oid find_feature_matches(const Mat &img_1, const Mat &img_2,
                          std::vector<KeyPoint> &keypoints_1,
                          std::vector<KeyPoint> &keypoints_2,
                          std::vector<DMatch> &matches) {
  //-- 初始化
  Mat descriptors_1, descriptors_2;
  // used in OpenCV3
  //FeatureDetector是虛類,通過定義FeatureDetector對象使用多種特征檢測方法  
  Ptr<FeatureDetector> detector = ORB::create();
  
  //DescriptorExtractor提取關鍵點的描述子基類
  Ptr<DescriptorExtractor> descriptor = ORB::create();

  //描述子比對方式,此處是漢明距離
  Ptr<DescriptorMatcher> matcher = DescriptorMatcher::create("BruteForce-Hamming");
  
  //-- 第一步:檢測 Oriented FAST 角點位置
  detector->detect(img_1, keypoints_1);
  detector->detect(img_2, keypoints_2);

  //-- 第二步:根據角點位置計算 BRIEF 描述子
  descriptor->compute(img_1, keypoints_1, descriptors_1);
  descriptor->compute(img_2, keypoints_2, descriptors_2);

  //-- 第三步:對兩幅圖像中的BRIEF描述子進行比對,使用 Hamming 距離
  //match中存放比對的描述子資訊
  vector<DMatch> match;
  matcher->match(descriptors_1, descriptors_2, match);

  //-- 第四步:比對點對篩選
  double min_dist = 10000, max_dist = 0;

  //找出所有比對之間的最小距離和最大距離, 即是最相似的和最不相似的兩組點之間的距離
  for (int i = 0; i < descriptors_1.rows; i++) {
    double dist = match[i].distance;
    if (dist < min_dist) min_dist = dist;
    if (dist > max_dist) max_dist = dist;
  }

  printf("-- Max dist : %f \n", max_dist);
  printf("-- Min dist : %f \n", min_dist);

  //當描述子之間的距離大于兩倍的最小距離時,即認為比對有誤.但有時候最小距離會非常小,設定一個經驗值30作為下限.
  for (int i = 0; i < descriptors_1.rows; i++) {
    if (match[i].distance <= max(2 * min_dist, 30.0)) {
      matches.push_back(match[i]);
    }
  }
}
           

//通過比對點求本質矩陣E,基礎矩陣F,以及相機間的R,T

// Estimate the relative camera pose (R, t) from 2D-2D keypoint matches:
// computes the fundamental matrix F, the essential matrix E and the homography H,
// then recovers R and t by decomposing E.
// Bug fixes vs. original:
//  - K was written with 10 entries for a 3x3 matrix (a stray 0 split fy and cy);
//  - points2 was indexed with queryIdx (an index into keypoints_1); trainIdx is correct;
//  - the principal point was (3251, 249.7); the calibrated value is (325.1, 249.7).
void pose_estimation_2d2d(std::vector<KeyPoint> keypoints_1, std::vector<KeyPoint> keypoints_2, std::vector<DMatch> matches, cv::Mat& R, cv::Mat& t)
{
    // Camera intrinsic matrix [fx 0 cx; 0 fy cy; 0 0 1]
    Mat K = (Mat_<double>(3, 3) << 520.9, 0, 325.1, 0, 521.0, 249.7, 0, 0, 1);

    // Convert the matched keypoints into vector<Point2f> form.
    vector<Point2f> points1;
    vector<Point2f> points2;

    for (int i = 0; i < (int)matches.size(); i++)
    {
        points1.push_back(keypoints_1[matches[i].queryIdx].pt);
        points2.push_back(keypoints_2[matches[i].trainIdx].pt);  // trainIdx indexes keypoints_2
    }

    // Compute the fundamental matrix F (8-point algorithm).
    Mat fundamental_matrix;
    fundamental_matrix = findFundamentalMat(points1, points2, CV_FM_8POINT);
    cout << "FUNDAMENTAL_MATRIX" << endl << fundamental_matrix << endl;

    // Compute the essential matrix E.
    Point2d principal_point(325.1, 249.7);  // optical center from calibration (matches cx, cy in K)
    double focal_length = 521;              // focal length from calibration
    Mat essential_matrix;
    essential_matrix = findEssentialMat(points1, points2, focal_length, principal_point);
    cout << "essential_matrix is" << endl << essential_matrix << endl;

    // Compute the homography matrix (this scene is not planar; shown for comparison).
    Mat homography_matrix;
    homography_matrix = findHomography(points1, points2, RANSAC, 3);
    cout << "homography_matrix is" << endl << homography_matrix << endl;

    // Recover rotation and translation from the essential matrix.
    recoverPose(essential_matrix, points1, points2, R, t, focal_length, principal_point);
    cout << "R :" << R << endl;
    cout << "t is" << t << endl;
}

           

//完整源碼如下

#include <cassert>
#include <iostream>
#include <string>

#include <opencv2/calib3d/calib3d.hpp>
#include <opencv2/core/core.hpp>
#include <opencv2/features2d/features2d.hpp>
#include <opencv2/highgui/highgui.hpp>

using namespace std;
using namespace cv;
// Detect ORB keypoints in both images and return the filtered good matches.
void find_feature_matches(
  const Mat &img_1, const Mat &img_2,
  std::vector<KeyPoint> &keypoints_1,
  std::vector<KeyPoint> &keypoints_2,
  std::vector<DMatch> &matches);

// Estimate the relative pose (R, t) between two views from 2D-2D matches.
void pose_estimation_2d2d(
  std::vector<KeyPoint> keypoints_1,
  std::vector<KeyPoint> keypoints_2,
  std::vector<DMatch> matches,
  Mat &R, Mat &t);

// Convert pixel coordinates to normalized camera coordinates.
// NOTE(review): declared but not defined in the visible part of this file.
Point2d pixel2cam(const Point2d &p, const Mat &K);

int main(int argc, char **argv) {
    std::cout << "Hello, world!" << std::endl;
//       if (argc != 3) {
//     cout << "usage: pose_estimation_2d2d img1 img2" << endl;
//     return 1;
//   }
   Mat img_1 = imread("/home/yang/Pictures/1.png");
  Mat img_2 = imread("/home/yang/Pictures/2.png");
   assert(img_1.data && img_2.data && "Can not load images!");
   vector<KeyPoint> keypoints_1, keypoints_2;
  vector<DMatch> matches;
  find_feature_matches(img_1, img_2, keypoints_1, keypoints_2, matches);
  cout << "一共找到了" << matches.size() << "組比對點" << endl;
  
  //估計兩張圖像間的運動
  Mat R,t;
  pose_estimation_2d2d(keypoints_1,keypoints_2,matches,R,t);
  
//   //驗證E=t^R*scale
//    Mat t_x =
//     (Mat_<double>(3, 3) << 0, -t.at<double>(2, 0), t.at<double>(1, 0),
//       t.at<double>(2, 0), 0, -t.at<double>(0, 0),
//       -t.at<double>(1, 0), t.at<double>(0, 0), 0);
// 
//   cout << "t^R=" << endl << t_x * R << endl;
// 
//   //-- 驗證對極限制
//   Mat K = (Mat_<double>(3, 3) << 520.9, 0, 325.1, 0, 521.0, 249.7, 0, 0, 1);
//   for (DMatch m: matches) {
//     Point2d pt1 = pixel2cam(keypoints_1[m.queryIdx].pt, K);
//     Mat y1 = (Mat_<double>(3, 1) << pt1.x, pt1.y, 1);
//     Point2d pt2 = pixel2cam(keypoints_2[m.trainIdx].pt, K);
//     Mat y2 = (Mat_<double>(3, 1) << pt2.x, pt2.y, 1);
//     Mat d = y2.t() * t_x * R * y1;
//     cout << "epipolar constraint = " << d << endl;
//   }
    return 0;
}


void find_feature_matches(const Mat &img_1, const Mat &img_2,
                          std::vector<KeyPoint> &keypoints_1,
                          std::vector<KeyPoint> &keypoints_2,
                          std::vector<DMatch> &matches) {
  //-- 初始化
  Mat descriptors_1, descriptors_2;
  // used in OpenCV3
  //FeatureDetector是虛類,通過定義FeatureDetector對象使用多種特征檢測方法  
  Ptr<FeatureDetector> detector = ORB::create();
  
  //DescriptorExtractor提取關鍵點的描述子基類
  Ptr<DescriptorExtractor> descriptor = ORB::create();

  //描述子比對方式,此處是漢明距離
  Ptr<DescriptorMatcher> matcher = DescriptorMatcher::create("BruteForce-Hamming");
  
  //-- 第一步:檢測 Oriented FAST 角點位置
  detector->detect(img_1, keypoints_1);
  detector->detect(img_2, keypoints_2);

  //-- 第二步:根據角點位置計算 BRIEF 描述子
  descriptor->compute(img_1, keypoints_1, descriptors_1);
  descriptor->compute(img_2, keypoints_2, descriptors_2);

  //-- 第三步:對兩幅圖像中的BRIEF描述子進行比對,使用 Hamming 距離
  //match中存放比對的描述子資訊
  vector<DMatch> match;
  matcher->match(descriptors_1, descriptors_2, match);

  //-- 第四步:比對點對篩選
  double min_dist = 10000, max_dist = 0;

  //找出所有比對之間的最小距離和最大距離, 即是最相似的和最不相似的兩組點之間的距離
  for (int i = 0; i < descriptors_1.rows; i++) {
    double dist = match[i].distance;
    if (dist < min_dist) min_dist = dist;
    if (dist > max_dist) max_dist = dist;
  }

  printf("-- Max dist : %f \n", max_dist);
  printf("-- Min dist : %f \n", min_dist);

  //當描述子之間的距離大于兩倍的最小距離時,即認為比對有誤.但有時候最小距離會非常小,設定一個經驗值30作為下限.
  for (int i = 0; i < descriptors_1.rows; i++) {
    if (match[i].distance <= max(2 * min_dist, 30.0)) {
      matches.push_back(match[i]);
    }
  }
}
// Estimate the relative camera pose (R, t) from 2D-2D keypoint matches by
// computing the fundamental matrix F, the essential matrix E and the homography H,
// then decomposing E via recoverPose.
// Bug fixes vs. original:
//  - K had 10 entries for a 3x3 matrix (a stray 0 split fy and cy); now 9 entries;
//  - points2 used queryIdx (an index into keypoints_1); trainIdx is the index into keypoints_2;
//  - the principal point was (3251, 249.7); the calibrated value is (325.1, 249.7).
void pose_estimation_2d2d(std::vector<KeyPoint> keypoints_1, std::vector<KeyPoint> keypoints_2, std::vector<DMatch> matches, cv::Mat& R, cv::Mat& t)
{
    // Camera intrinsic matrix [fx 0 cx; 0 fy cy; 0 0 1]
    Mat K = (Mat_<double>(3, 3) << 520.9, 0, 325.1, 0, 521.0, 249.7, 0, 0, 1);

    // Convert the matched keypoints into vector<Point2f> form.
    vector<Point2f> points1;
    vector<Point2f> points2;

    for (int i = 0; i < (int)matches.size(); i++)
    {
        points1.push_back(keypoints_1[matches[i].queryIdx].pt);
        points2.push_back(keypoints_2[matches[i].trainIdx].pt);  // trainIdx indexes keypoints_2
    }

    // Compute the fundamental matrix F with the 8-point algorithm.
    Mat fundamental_matrix;
    fundamental_matrix = findFundamentalMat(points1, points2, CV_FM_8POINT);
    cout << "FUNDAMENTAL_MATRIX" << endl << fundamental_matrix << endl;

    // Compute the essential matrix E.
    Point2d principal_point(325.1, 249.7);  // optical center from calibration (matches cx, cy in K)
    double focal_length = 521;              // focal length from calibration
    Mat essential_matrix;
    essential_matrix = findEssentialMat(points1, points2, focal_length, principal_point);
    cout << "essential_matrix is" << endl << essential_matrix << endl;

    // Compute the homography matrix (this scene is not planar; shown for comparison).
    Mat homography_matrix;
    homography_matrix = findHomography(points1, points2, RANSAC, 3);
    cout << "homography_matrix is" << endl << homography_matrix << endl;

    // Recover rotation and translation from the essential matrix.
    recoverPose(essential_matrix, points1, points2, R, t, focal_length, principal_point);
    cout << "R :" << R << endl;
    cout << "t is" << t << endl;
}
           

Hello, world!

-- Max dist : 95.000000

-- Min dist : 4.000000

一共找到了79組比對點

FUNDAMENTAL_MATRIX

[3.001782649065985e-07, 2.929766978702088e-06, -0.000812504944771384;

1.384506096893384e-06, 1.553664078779717e-05, -0.004052901633871815;

-0.0003407878720942878, -0.003840171520134294, 0.9999999999999999]

essential_matrix is

[-0.006077533157631877, -0.1156225435865585, -0.04016345885077072;

0.1223807210532025, -0.0896032038362659, 0.6901583208965004;

-0.008777404841446328, -0.6917849899348304, -0.08374717617716196]

homography_matrix is

[-0.2439101567348537, -0.1253168864446675, 83.55428831827061;

-0.5559188156021688, -0.3395671494116975, 206.6090664207155;

-0.00272012637686612, -0.001630296890504818, 0.9999999999999999]

R :[0.9987471128459281, -0.04904489612610739, -0.009939957037246538;

0.04988382153771165, 0.9915337305390732, 0.1198852182383724;

0.003976044606982226, -0.120230858631417, 0.9927379975109423]

t is[0.9848668819474229;

0.03676185726732396;

-0.1693688008265328]

繼續閱讀