opencv 模闆比對 matchTemplate
demo:http://download.csdn.net/detail/keen_zuxwang/9847695
模闆比對
在一幅圖像中尋找一個特定目标的方法。周遊圖像中的每一個位置,比較與模闆是否“相似”,當相似度足夠高時,就認為找到了目标。
常用于目标檢測、相似度分析
//! computes the proximity map for the raster template and the image where the template is searched for
CV_EXPORTS_W void matchTemplate( InputArray image, InputArray templ,
OutputArray result, int method );
功能:在輸入搜尋圖像中按照method方法比對模闆
image
用于搜尋的輸入圖像, 8U 或 32F, 大小 W-H
templ
比對模闆圖像,和image類型相同, 大小 w-h
result
比對結果圖像, 類型 32F, 大小 (W-w+1)-(H-h+1)
method
比對方法,根據實際情況而定:
TM_SQDIFF:
平方差進行比對,最佳的比對結果在結果為0處,值越大比對結果越差
TM_SQDIFF_NORMED:
歸一化的平方差進行比對,最佳比對在結果為0處
TM_CCORR:
相關性比對方法,該方法使用源圖像與模闆圖像的卷積結果進行比對,最佳比對位置在值最大處,值越小比對結果越差
TM_CCORR_NORMED:
歸一化的相關性比對方法,與相關性比對方法類似,最佳比對位置也是在值最大處
TM_CCOEFF:
相關性系數比對方法,該方法使用源圖像與其均值的差、模闆與其均值的差二者之間的相關性進行比對,最佳比對結果在值等于1處,最差比對結果在值等于-1處,值等于0直接表示二者不相關
TM_CCOEFF_NORMED:
歸一化的相關性系數比對方法,正值表示比對的結果較好,負值則表示比對的效果較差,也是值越大,比對效果也好
平方差比對到相關系數比對,比對的準确度越來越高(計算代價也随之升高). 通常可測試實驗,選擇同時兼顧速度和精度的最佳比對方法
//! finds global minimum and maximum array elements and returns their values and their locations
CV_EXPORTS_W void minMaxLoc(InputArray src, CV_OUT double* minVal,
                            CV_OUT double* maxVal = 0, CV_OUT Point* minLoc = 0,
                            CV_OUT Point* maxLoc = 0, InputArray mask = noArray());
JNI端程式:
tempray.cpp:
#include <string>
#include <jni.h>
#include <android/log.h>
#include <iostream>
#include <fstream>
#include <opencv2/core/core.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/imgproc/imgproc.hpp>
#include <opencv2/objdetect/objdetect.hpp>
#include <opencv2/ml/ml.hpp>
#ifdef __cplusplus
extern "C" {
#endif
//#define LOG_TAG "show infomation"
// Side length (pixels) of the square template patch cut around the touch point.
#define Template 100
#define LOG_TAG "libtracking"
// Android logcat helpers (info / debug / error).
#define LOGI(...) __android_log_print(ANDROID_LOG_INFO,LOG_TAG,__VA_ARGS__)
#define LOGD(...) __android_log_print(ANDROID_LOG_DEBUG,LOG_TAG,__VA_ARGS__)
#define LOGE(...) __android_log_print(ANDROID_LOG_ERROR,LOG_TAG,__VA_ARGS__)
using namespace cv;
using namespace std;
// File-scope state shared by the two JNI entry points below.
// NOTE(review): not thread-safe if both entry points run concurrently.
int result_cols;      // width of the matchTemplate result map: W - w + 1
int result_rows;      // height of the matchTemplate result map: H - h + 1
double minVal;        // outputs of minMaxLoc over the result map
double maxVal;
Point minLoc;
Point maxLoc;
Point matchLoc;       // best-match top-left corner for the chosen method
int match_method = TM_CCORR;//CV_TM_SQDIFF;
/**
 * JNI entry point: cut a Template x Template patch centered on the touch
 * point out of the frame, match it back against the frame with
 * matchTemplate(), and draw a rectangle around the best match on the
 * original frame (in place).
 *
 * @param imageGray  address of a cv::Mat passed from Java (the camera frame)
 * @param touch_x    touch column (x) in image coordinates
 * @param touch_y    touch row (y) in image coordinates
 *
 * Returns early (after logging) when the touch point is too close to the
 * border for a full Template x Template neighborhood.
 * NOTE(review): the numeric literals below were dropped by the blog paste
 * ("k = ,", "Template + ;", "Scalar(,,)") and have been reconstructed.
 */
JNIEXPORT void JNICALL Java_org_opencv_samples_tutorial2_Tutorial2Activity_grayProc(JNIEnv* env, jclass obj, jlong imageGray,
jint touch_x,jint touch_y)
{
    int width, height;
    int k = 0, n = 0, i, j;
    CvScalar s;
    Mat result;                          // 32F proximity map from matchTemplate()
    Mat mat = Mat(*((Mat*)imageGray));   // Mat: Java -> JNI, passed by address
    width  = mat.rows;
    height = mat.cols;
    // Result map is (W-w+1) x (H-h+1) for a WxH search image and wxh template.
    result_cols = height - Template + 1;
    result_rows = width  - Template + 1;
    cv::Mat img = cv::Mat(Template, Template, CV_8UC3, cv::Scalar(0, 0, 0));  // template patch
    IplImage pI   = mat;    // legacy C-API views so cvGet2D/cvSet2D can be used
    IplImage pI_2 = img;
    Mat img_display = cv::Mat(width, height, CV_8UC3, cv::Scalar(0, 0, 0));   // search-image copy
    IplImage pI_3 = img_display;
    // Copy the input frame pixel-by-pixel into img_display.
    for (i = 0; i < width; i++) {
        for (j = 0; j < height; j++) {
            s = cvGet2D(&pI, i, j);      // cvGet2D takes (row, col)
            cvSet2D(&pI_3, i, j, s);
        }
    }
    // Reject touches whose Template x Template neighborhood leaves the frame.
    if ((touch_x < (Template / 2)) || (touch_x > (height - (Template / 2))) ||
        (touch_y < (Template / 2)) || (touch_y > (width  - (Template / 2)))) {
        //__android_log_print(ANDROID_LOG_ERROR, "JNITag","touch_x,touch_y is too small or too large\n");
        LOGD("touch_x = %d, touch_y = %d, width = %d, height = %d", touch_x, touch_y, width, height);
        return;
    }
    // Cut the Template x Template patch centered on (touch_x, touch_y) into img.
    for (i = (touch_x - (Template / 2)); i < (touch_x + (Template / 2)); i++) {
        for (j = (touch_y - (Template / 2)); j < (touch_y + (Template / 2)); j++) {
            s = cvGet2D(&pI, j, i);      // (row, col) = (y, x), hence (j, i)
            cvSet2D(&pI_2, k, n, s);
            n++;
        }
        k++;
        n = 0;
    }
    // Do the Matching and Normalize.
    // Fix: write the proximity map into `result` instead of overwriting the
    // search image; matchTemplate() allocates `result` as CV_32F of size
    // result_rows x result_cols by itself.
    matchTemplate(img_display, img, result, match_method);
    normalize(result, result, 0, 1, NORM_MINMAX, -1, Mat());
    // Localize the best match with minMaxLoc.
    minMaxLoc(result, &minVal, &maxVal, &minLoc, &maxLoc, Mat());
    // For SQDIFF and SQDIFF_NORMED the best match is the minimum;
    // for all other methods the maximum.
    if (match_method == CV_TM_SQDIFF || match_method == CV_TM_SQDIFF_NORMED) {
        matchLoc = minLoc;
    } else {
        matchLoc = maxLoc;
    }
    // Frame the matched region on the original frame
    // (black box, thickness 2, 8-connected, no fractional bits).
    rectangle(mat, matchLoc, Point(matchLoc.x + img.cols, matchLoc.y + img.rows), Scalar::all(0), 2, 8, 0);
}
// Declared but not used in this translation unit; kept for compatibility.
int run_time;
/**
 * JNI entry point: locate the template `tempGray` inside the frame
 * `imageGray`, draw a rectangle around the best match on a BGR copy of the
 * frame, and return that copy to Java.
 *
 * @param imageGray  address of a cv::Mat passed from Java (search image;
 *                   assumed BGRA -- see cvtColor below, TODO confirm)
 * @param tempGray   address of a cv::Mat passed from Java (template image)
 * @return address of a heap-allocated cv::Mat holding the annotated BGR copy;
 *         the Java side is responsible for releasing it.
 *
 * NOTE(review): numeric literals dropped by the blog paste ("normalize(..., , ,",
 * "Scalar(,,)") have been reconstructed.
 */
JNIEXPORT jlong JNICALL Java_org_opencv_samples_tutorial2_Tutorial2Activity_grayProc0(JNIEnv* env, jclass obj, jlong imageGray, jlong tempGray)
{
    Mat img_display = Mat(*(Mat*)imageGray); // Mat: Java -> JNI, passed by address
    Mat img = Mat(*(Mat*)tempGray);
    Mat input;
    input.create(img_display.size(), CV_8UC3);
    cvtColor(img_display, input, CV_BGRA2BGR);   // drop alpha for drawing/returning
    Mat result;                                  // 32F proximity map
    // Do the Matching and Normalize (NORM_MINMAX rescales the map to [0,1]).
    // Fix: output into `result` rather than clobbering the search image.
    matchTemplate(img_display, img, result, match_method);
    normalize(result, result, 0, 1, NORM_MINMAX, -1, Mat());
    // Localize the best match: min/max values and their positions in the map.
    minMaxLoc(result, &minVal, &maxVal, &minLoc, &maxLoc, Mat());
    // For SQDIFF and SQDIFF_NORMED the best match is the minimum;
    // for all other methods the maximum.
    if (match_method == CV_TM_SQDIFF || match_method == CV_TM_SQDIFF_NORMED) {
        matchLoc = minLoc;
    } else {
        matchLoc = maxLoc;
    }
    LOGD("matchLoc.x = %d, matchLoc.y = %d, img.cols = %d, img.rows = %d", matchLoc.x, matchLoc.y, img.cols, img.rows);
    // Frame the matched template region on the BGR copy
    // (red box, thickness 2, 8-connected, no fractional bits).
    rectangle(input, matchLoc, Point(matchLoc.x + img.cols, matchLoc.y + img.rows), Scalar(0, 0, 255), 2, 8, 0);//Scalar::all(0)
    Mat *mat = new Mat(input);
    return (jlong) mat; // return the address, Mat: JNI -> Java
}
#ifdef __cplusplus
}
#endif
TM_CCOEFF:(原文此處為 TM_CCOEFF 方法的比對結果示例圖,轉載時圖片遺失)

TM_SQDIFF:(原文此處為 TM_SQDIFF 方法的比對結果示例圖,轉載時圖片遺失)