假設兩個攝像頭平行固定,所拍攝的圖像視差很小,可以通過“柱面投影+模闆比對+漸入漸出融合”的解決方案實作視訊拼接。
關于這種方法的靜态圖像拼接,參考圖像拼接(一):柱面投影+模闆比對+漸入漸出融合
OpenCV雙攝像頭捕獲視訊并實時顯示的代碼,參見:圖像拼接(三):OpenCV同時打開兩個攝像頭捕獲視訊
将代碼整合,實作雙攝像頭實時視訊拼接:
#include "opencv2/core/core.hpp"
#include "opencv2/highgui/highgui.hpp"
#include "opencv2/imgproc/imgproc.hpp"
#include<iostream>
using namespace cv;
using namespace std;
//The 3 helper functions below are identical to the ones in part (一) of this
//series (cylindrical projection + template matching + fade-in/fade-out fusion).
//Cylindrical projection correction
Mat cylinder(Mat imgIn, int f);
//Compute the translation offset between two images (right image w.r.t. left)
Point2i getOffset(Mat img, Mat img1);
//Linear (fade-in/fade-out) blending of the two images using offset a
Mat linearFusion(Mat img, Mat img1, Point2i a);
int main()
{
VideoCapture cap1();
VideoCapture cap2();
double rate = ;
int delay = / rate;
bool stop(false);
Mat frame1;
Mat frame2;
Mat frame;
Point2i a;//存儲偏移量
int k = ;
namedWindow("cam1", CV_WINDOW_AUTOSIZE);
namedWindow("cam2", CV_WINDOW_AUTOSIZE);
namedWindow("stitch", CV_WINDOW_AUTOSIZE);
if (cap1.isOpened()&&cap2.isOpened())
{
cout << "*** ***" << endl;
cout << "攝像頭已啟動!"<<endl;
}
else
{
cout << "*** ***" << endl;
cout << "警告:攝像頭打開不成功或者未檢測到有兩個攝像頭!" << endl;
cout << "程式結束!" <<endl<< "*** ***" << endl;
return -;
}
//cap1.set(CV_CAP_PROP_FRAME_WIDTH, 640);
//cap1.set(CV_CAP_PROP_FRAME_HEIGHT, 480);
//cap2.set(CV_CAP_PROP_FRAME_WIDTH, 640);
//cap2.set(CV_CAP_PROP_FRAME_HEIGHT, 480);
cap1.set(CV_CAP_PROP_FOCUS,);
cap2.set(CV_CAP_PROP_FOCUS, );
while (!stop)
{
if (cap1.read(frame1) && cap2.read(frame2))
{
imshow("cam1", frame1);
imshow("cam2", frame2);
//彩色幀轉灰階
cvtColor(frame1, frame1, CV_RGB2GRAY);
cvtColor(frame2, frame2, CV_RGB2GRAY);
//柱面投影變換
//frame1 = cylinder(frame1, 1005);
//frame2 = cylinder(frame2, 1005);
//比對和拼接
/*視訊拼接通過while循環實作,下面這個判斷的意思是,有兩
*種情形才計算平移參數,一是程式啟動時,前3個循環内;二
*是按下Enter鍵時。這樣在場景和攝像頭相對固定時,避免了平
*移量的重複計算,提高了拼接的實時性
*/
if (k < || waitKey(delay) == )//按Enter鍵
{
cout << "正在比對..."<<endl;
a = getOffset(frame1, frame2);
}
frame = linearFusion(frame1, frame2, a);
imshow("stitch", frame);
k++;
}
else
{
cout << "----------------------" << endl;
cout << "waitting..." << endl;
}
//按下ESC鍵,退出循環,程式結束
if (waitKey() == )
{
stop = true;
cout << "程式結束!" << endl;
cout << "*** ***" << endl;
}
}
return ;
}
//Compute the translation (dx, dy) of the right image (img1) relative to the
//left image (img) using normalized cross-correlation template matching.
Point2i getOffset(Mat img, Mat img1)
{
    // Template: a strip near the left edge of the right image,
    // x in [0, 0.2*cols), y in [0.4*rows, 0.6*rows).
    // NOTE(review): the 0 / 0.4 / 0.2 factors were dropped from the damaged
    // source and restored from the companion article -- TODO confirm.
    Mat templ(img1, Rect(0, 0.4*img1.rows, 0.2*img1.cols, 0.2*img1.rows));
    // Result of matchTemplate must be single-channel float of exactly
    // (W-w+1) x (H-h+1); CV_32FC1 (the original CV_8UC1 would be silently
    // reallocated by matchTemplate anyway).
    Mat result(img.cols - templ.cols + 1, img.rows - templ.rows + 1, CV_32FC1);
    matchTemplate(img, templ, result, CV_TM_CCORR_NORMED);
    normalize(result, result, 0, 1, NORM_MINMAX, -1, Mat());
    double minVal; double maxVal; Point minLoc; Point maxLoc; Point matchLoc;
    minMaxLoc(result, &minVal, &maxVal, &minLoc, &maxLoc, Mat());
    matchLoc = maxLoc;   // for CCORR_NORMED the best match is the maximum
    int dx = matchLoc.x;
    int dy = matchLoc.y - 0.4*img1.rows;   // vertical shift of right image vs left
    Point2i a(dx, dy);
    return a;
}
//Linear (fade-in/fade-out) fusion: blend left image img and right image img1
//into one stitched image, given the offset a = (dx, dy) of img1 w.r.t. img.
//In the overlap [a.x, img.cols) the weight ramps linearly from img to img1.
//(All zero literals below were dropped from the damaged source and restored.)
Mat linearFusion(Mat img, Mat img1, Point2i a)
{
    int d = img.cols - a.x;          // width of the transition (overlap) zone
    int ms = img.rows - abs(a.y);    // rows of the stitched image
    int ns = img.cols + a.x;         // cols of the stitched image
    Mat stitch = Mat::zeros(ms, ns, CV_8UC1);
    // Pixel-level accessors for the blend loop
    Mat_<uchar> ims(stitch);
    Mat_<uchar> im(img);
    Mat_<uchar> im1(img1);
    if (a.y >= 0)
    {
        // Right image sits lower: crop the top of the left image by a.y rows.
        Mat roi1(stitch, Rect(0, 0, a.x, ms));
        img(Range(a.y, img.rows), Range(0, a.x)).copyTo(roi1);
        Mat roi2(stitch, Rect(img.cols, 0, a.x, ms));
        img1(Range(0, ms), Range(d, img1.cols)).copyTo(roi2);
        // Blend the overlap: weight of img decays, weight of img1 grows with j.
        for (int i = 0; i < ms; i++)
            for (int j = a.x; j < img.cols; j++)
                ims(i, j) = uchar((img.cols - j) / float(d)*im(i + a.y, j) + (j - a.x) / float(d)*im1(i, j - a.x));
    }
    else
    {
        // Right image sits higher: crop the top of the right image instead.
        Mat roi1(stitch, Rect(0, 0, a.x, ms));
        img(Range(0, ms), Range(0, a.x)).copyTo(roi1);
        Mat roi2(stitch, Rect(img.cols, 0, a.x, ms));
        img1(Range(-a.y, img.rows), Range(d, img1.cols)).copyTo(roi2);
        for (int i = 0; i < ms; i++)
            for (int j = a.x; j < img.cols; j++)
                ims(i, j) = uchar((img.cols - j) / float(d)*im(i, j) + (j - a.x) / float(d)*im1(i + abs(a.y), j - a.x));
    }
    return stitch;
}
//Cylindrical projection correction: warp the input image onto a cylinder of
//focal length f (forward mapping, nearest-pixel write; holes are left black).
//NOTE(review): the 2 / 0.5 / exponent-2 constants were dropped from the
//damaged source and restored from the standard cylindrical-projection
//formulas used in part (一) of this series -- TODO confirm against it.
Mat cylinder(Mat imgIn, int f)
{
    int colNum, rowNum;
    colNum = 2 * f*atan(0.5*imgIn.cols / f);                     // width of cylindrical image
    rowNum = 0.5*imgIn.rows*f / sqrt(pow(f, 2)) + 0.5*imgIn.rows; // height of cylindrical image
    Mat imgOut = Mat::zeros(rowNum, colNum, CV_8UC1);
    Mat_<uchar> im1(imgIn);
    Mat_<uchar> im2(imgOut);
    //Forward mapping: for each source pixel compute its cylindrical position
    int x1(0), y1(0);
    for (int i = 0; i < imgIn.rows; i++)
        for (int j = 0; j < imgIn.cols; j++)
        {
            // Angle around the cylinder axis, shifted so x1 >= 0 at j == 0
            x1 = f*atan((j - 0.5*imgIn.cols) / f) + f*atan(0.5*imgIn.cols / f);
            // Vertical position scaled by the distance to the optical center
            y1 = f*(i - 0.5*imgIn.rows) / sqrt(pow(j - 0.5*imgIn.cols, 2) + pow(f, 2)) + 0.5*imgIn.rows;
            if (x1 >= 0 && x1 < colNum && y1 >= 0 && y1 < rowNum)
            {
                im2(y1, x1) = im1(i, j);
            }
        }
    return imgOut;
}