
Common OpenCV Image Stitching Methods (3): Feature-Matching-Based Stitching

The common OpenCV image stitching methods will be shared in four parts; this is the third method. Stay tuned for the follow-ups.

Common OpenCV image stitching methods (3): feature-matching-based stitching. This part introduces stitching based on SIFT feature matching, using OpenCV 4.4.0. Characteristics and applicable scenarios: the images to be stitched share a sufficiently large overlapping region with common features, and there is no significant scale change or distortion between them.

Advantages: tolerates some tilt and perspective change. Disadvantages: requires enough common feature regions to match, is relatively slow, and is prone to crashing when stitching large images.
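
Note that starting with OpenCV 4.4.0, SIFT is available directly from the main features2d module as cv::SIFT (no opencv_contrib build is required), which is why that version is used here. Below is a minimal, illustrative sketch of the detector-plus-matcher setup the code later relies on; the function and variable names are placeholders, not part of the original source.

#include <opencv2/opencv.hpp>
#include <vector>

// Minimal SIFT + FLANN matching sketch for OpenCV >= 4.4.0 (illustrative only)
void sketchMatch(const cv::Mat &gray1, const cv::Mat &gray2, std::vector<cv::DMatch> &matches)
{
    std::vector<cv::KeyPoint> kp1, kp2;
    cv::Mat desc1, desc2;

    cv::Ptr<cv::SIFT> sift = cv::SIFT::create();            // SIFT lives in features2d since 4.4.0
    sift->detectAndCompute(gray1, cv::Mat(), kp1, desc1);   // keypoints + descriptors of image 1
    sift->detectAndCompute(gray2, cv::Mat(), kp2, desc2);   // keypoints + descriptors of image 2

    cv::FlannBasedMatcher matcher;                           // works with SIFT's float descriptors
    matcher.match(desc1, desc2, matches);                    // nearest-neighbour match per descriptor
}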

Below are the two images to be stitched:

Feature matching result:

Stitching result:

After seam processing (a more natural transition at the seam):

Core code:

#include <opencv2/opencv.hpp>
#include <iostream>
#include <vector>

using namespace cv;
using namespace std;

/******************** Direct image stitching function *************************/
bool ImageOverlap0(Mat &img1, Mat &img2)
{
  Mat g1(img1, Rect(0, 0, img1.cols, img1.rows));  // init roi 
  Mat g2(img2, Rect(0, 0, img2.cols, img2.rows));

  cvtColor(g1, g1, COLOR_BGR2GRAY);
  cvtColor(g2, g2, COLOR_BGR2GRAY);

  vector<cv::KeyPoint> keypoints_roi, keypoints_img;  /* keypoints found using SIFT */
  Mat descriptor_roi, descriptor_img;                           /* Descriptors for SIFT */
  FlannBasedMatcher matcher;                                   /* FLANN based matcher to match keypoints */

  vector<cv::DMatch> matches, good_matches;
  cv::Ptr<cv::SIFT> sift = cv::SIFT::create();
  int i;

  sift->detectAndCompute(g1, cv::Mat(), keypoints_roi, descriptor_roi);      /* get keypoints of ROI image */
  sift->detectAndCompute(g2, cv::Mat(), keypoints_img, descriptor_img);         /* get keypoints of the image */
  matcher.match(descriptor_roi, descriptor_img, matches);  // match the descriptors between the two images

  double max_dist = 0; double min_dist = 5000;
  //-- Quick calculation of max and min distances between keypoints 
  for (int i = 0; i < descriptor_roi.rows; i++)
  {
    double dist = matches[i].distance;
    if (dist < min_dist) min_dist = dist;
    if (dist > max_dist) max_dist = dist;
  }
  // Keep only the matches whose distance is within 3x the minimum distance
  for (i = 0; i < descriptor_roi.rows; i++)
  {
    if (matches[i].distance < 3 * min_dist)
    {
      good_matches.push_back(matches[i]);
    }
  }

  printf("%zu matched keypoints kept after filtering\n", good_matches.size());
  /* Draw matched keypoints */

  Mat img_matches;
  // Draw the good matches
  drawMatches(img1, keypoints_roi, img2, keypoints_img,
    good_matches, img_matches, Scalar::all(-1),
    Scalar::all(-1), vector<char>(),
    DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS);
  imshow("matches", img_matches);

  vector<Point2f> keypoints1, keypoints2;
  for (i = 0; i < (int)good_matches.size(); i++)
  {
    keypoints1.push_back(keypoints_img[good_matches[i].trainIdx].pt);   // matched points in img2
    keypoints2.push_back(keypoints_roi[good_matches[i].queryIdx].pt);   // corresponding points in img1
  }
  // Compute the homography matrices: H maps img2 onto img1, H2 maps img1 onto img2
  Mat H = findHomography(keypoints1, keypoints2, RANSAC);
  Mat H2 = findHomography(keypoints2, keypoints1, RANSAC);


  Mat stitchedImage;  // the warped image, which is also the stitching result
  int mRows = img2.rows;
  if (img1.rows > img2.rows)
  {
    mRows = img1.rows;  // output height = the larger of the two input heights
  }

  // Count how many of the matched points in img1 lie to the right of img2.cols / 2
  int count = 0;
  for (int i = 0; i < (int)keypoints2.size(); i++)
  {
    if (keypoints2[i].x >= img2.cols / 2)
      count++;
  }
  // Use the distribution of the matched points to decide which image is on the left
  if (count / float(keypoints2.size()) >= 0.5)  // img2 is on the right
  {
    cout << "img1 should be left" << endl;
    vector<Point2f>corners(4);
    vector<Point2f>corners2(4);
    corners[0] = Point(0, 0);
    corners[1] = Point(0, img2.rows);
    corners[2] = Point(img2.cols, img2.rows);
    corners[3] = Point(img2.cols, 0);
    stitchedImage = Mat::zeros(mRows, img1.cols + img2.cols, CV_8UC3);  // Mat::zeros takes (rows, cols, type)
    warpPerspective(img2, stitchedImage, H, Size(img1.cols + img2.cols, mRows));  // warp img2 into img1's coordinate frame

    perspectiveTransform(corners, corners2, H);  // map img2's corners to find its warped extent
    /*
    circle(stitchedImage, corners2[0], 5, Scalar(0, 255, 0), 2, 8);
    circle(stitchedImage, corners2[1], 5, Scalar(0, 255, 255), 2, 8);
    circle(stitchedImage, corners2[2], 5, Scalar(0, 255, 0), 2, 8);
    circle(stitchedImage, corners2[3], 5, Scalar(0, 255, 0), 2, 8); */
    cout << corners2[0].x << ", " << corners2[0].y << endl;
    cout << corners2[1].x << ", " << corners2[1].y << endl;
    imshow("temp", stitchedImage);
    //imwrite("temp.jpg", stitchedImage);

    Mat half(stitchedImage, Rect(0, 0, img1.cols, img1.rows));  // ROI for the left part of the result
    img1.copyTo(half);  // paste img1 (the left image) into the result
    imshow("result", stitchedImage);
  }
  else  // img2 is on the left
  {
    cout << "img2 should be left" << endl;
    stitchedImage = Mat::zeros(mRows, img1.cols + img2.cols, CV_8UC3);  // Mat::zeros takes (rows, cols, type)
    warpPerspective(img1, stitchedImage, H2, Size(img1.cols + img2.cols, mRows));  // warp img1 into img2's coordinate frame
    imshow("temp", stitchedImage);

    // Compute the four corners of img1 after the perspective transform
    vector<Point2f>corners(4);
    vector<Point2f>corners2(4);
    corners[0] = Point(0, 0);
    corners[1] = Point(0, img1.rows);
    corners[2] = Point(img1.cols, img1.rows);
    corners[3] = Point(img1.cols, 0);

    perspectiveTransform(corners, corners2, H2);  // transform the corner points
    /*
    circle(stitchedImage, corners2[0], 5, Scalar(0, 255, 0), 2, 8);
    circle(stitchedImage, corners2[1], 5, Scalar(0, 255, 255), 2, 8);
    circle(stitchedImage, corners2[2], 5, Scalar(0, 255, 0), 2, 8);
    circle(stitchedImage, corners2[3], 5, Scalar(0, 255, 0), 2, 8); */
    cout << corners2[0].x << ", " << corners2[0].y << endl;
    cout << corners2[1].x << ", " << corners2[1].y << endl;

    Mat half(stitchedImage, Rect(0, 0, img2.cols, img2.rows));  // ROI for the left part of the result
    img2.copyTo(half);  // paste img2 (the left image) into the result
    imshow("result", stitchedImage);

  }
  imwrite("result.bmp", stitchedImage);
  return true;
}
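
A minimal usage sketch for the function above, assuming the two overlapping photos are saved as left.jpg and right.jpg (hypothetical file names):

int main()
{
    // Hypothetical input file names; replace with your own overlapping images
    Mat img1 = imread("left.jpg");
    Mat img2 = imread("right.jpg");
    if (img1.empty() || img2.empty())
    {
        cout << "Failed to load the input images." << endl;
        return -1;
    }

    ImageOverlap0(img1, img2);  // shows the matches and the stitching result
    waitKey(0);                 // keep the imshow windows open until a key is pressed
    return 0;
}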

The seam-optimization code and the complete source code with the sample materials will be published in a Knowledge Planet (知識星球) topic.
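
The author's seam-optimization code is not shown here. Purely as an illustration of the idea behind the smoother transition in the last figure, the sketch below applies generic linear feathering across the overlap: pixel weights shift gradually from the left image to the warped right image. It is not the author's method; the parameters overlapStart and overlapEnd are assumed to be known (for example from the warped corner coordinates printed above), and both inputs are assumed to share the height of the stitched result.

// Generic linear feathering across a known overlap region (illustrative sketch only)
void blendSeam(const Mat &left, const Mat &warpedRight, Mat &result, int overlapStart, int overlapEnd)
{
    result = warpedRight.clone();
    // Left of the overlap: take the left image as-is
    left(Rect(0, 0, overlapStart, left.rows)).copyTo(result(Rect(0, 0, overlapStart, left.rows)));

    for (int y = 0; y < left.rows; y++)
    {
        for (int x = overlapStart; x < overlapEnd && x < left.cols; x++)
        {
            // The left image's weight falls linearly from 1 to 0 across the overlap
            float wLeft = float(overlapEnd - x) / float(overlapEnd - overlapStart);
            Vec3b pl = left.at<Vec3b>(y, x);
            Vec3b pr = warpedRight.at<Vec3b>(y, x);
            for (int c = 0; c < 3; c++)
                result.at<Vec3b>(y, x)[c] = saturate_cast<uchar>(wLeft * pl[c] + (1.0f - wLeft) * pr[c]);
        }
    }
}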