License: Creative Commons Attribution-ShareAlike — others may create derivative works, but any redistribution must credit the original and be released under the same Creative Commons license.
OpenCV from Scratch in Practice, Part 7 — Template Matching and Shape Matching
Template matching, shape matching
1. Region matching by pixel values
The template image is slid across the source image like a convolution kernel, and at each position a similarity score is computed (squared difference, correlation coefficient, etc.).
The best match is then the minimum or maximum of the resulting score map — for some methods a larger value means more similar, for others a smaller one does.
Function: matchTemplate (src, templ, result, match_method)
/**
 * Runs template matching of `templ` over `img` and locates the best match.
 *
 * @param match_method  one of the cv::TemplateMatchModes (TM_SQDIFF, TM_CCORR_NORMED, ...)
 * @param templ         template image to search for
 * @param img           image to search in
 * @param matchLoc      [out] top-left corner of the best match
 * @param matchValue    [out] score of the best match in the result map
 * @param mask          optional template mask; only honored for TM_SQDIFF and
 *                      TM_CCORR_NORMED (the only methods matchTemplate supports
 *                      a mask for). Defaults to empty, which preserves the
 *                      previous unmasked behavior for existing callers.
 * @return the raw (un-normalized) result map produced by matchTemplate.
 */
Mat TempleMatch(int match_method, Mat& templ, Mat& img, Point& matchLoc,
                double* matchValue, const Mat& mask = Mat())
{
	Mat result;

	// matchTemplate accepts a mask only for these two methods.
	const bool method_accepts_mask =
		(match_method == TM_SQDIFF || match_method == TM_CCORR_NORMED);

	if (!mask.empty() && method_accepts_mask)
	{
		matchTemplate(img, templ, result, match_method, mask);
	}
	else
	{
		matchTemplate(img, templ, result, match_method);
	}

	// Locate the global extrema of the result map.
	double minVal;
	double maxVal;
	Point minLoc;
	Point maxLoc;
	minMaxLoc(result, &minVal, &maxVal, &minLoc, &maxLoc, Mat());

	// For SQDIFF and SQDIFF_NORMED the best match is the minimum;
	// for every other method it is the maximum.
	if (match_method == TM_SQDIFF || match_method == TM_SQDIFF_NORMED)
	{
		matchLoc = minLoc;
		*matchValue = minVal;
	}
	else
	{
		matchLoc = maxLoc;
		*matchValue = maxVal;
	}
	return result;
}
2. Contour matching
First extract the template's contour and compute its moment features. Then find all contours in the search image and compare each one's
moment features with the template's; the contour with the smallest difference is the best match.
Function: matchShapes (contours1, contours2, CV_CONTOURS_MATCH_I1, 0.0);
/**
 * Shape matching via Hu-moment contour comparison (matchShapes).
 *
 * Binarizes both images (NOTE: converts srcImg and its pixels in place), takes
 * the first external contour of srcImg as the template shape, then outlines in
 * green every contour of srcImg2 whose matchShapes distance is <= 0.1.
 * The annotated image is written to "dst.jpg".
 *
 * @param srcImg   template image (modified in place: grayscale + threshold)
 * @param srcImg2  search image; matching contours are drawn onto it
 * @return 0 on success, -1 if no contour was found in the template image
 *         (the original code indexed contours[0] unconditionally, which is
 *         undefined behavior when the template is blank).
 */
int ShapeMatch(Mat& srcImg, Mat& srcImg2)
{
	// Prepare the template: grayscale + fixed binary threshold at 150.
	cvtColor(srcImg, srcImg, COLOR_BGR2GRAY);
	threshold(srcImg, srcImg, 150, 255, THRESH_BINARY);
	vector<vector<Point>> contours;
	vector<Vec4i> hierarchy;
	findContours(srcImg, contours, hierarchy, RETR_EXTERNAL, CHAIN_APPROX_NONE);
	if (contours.empty())
	{
		return -1; // nothing to match against
	}

	// Prepare the search image the same way, but on a working copy so the
	// caller's srcImg2 stays in color for drawing the results.
	Mat src_gray;
	cvtColor(srcImg2, src_gray, COLOR_BGR2GRAY);
	threshold(src_gray, src_gray, 150, 255, THRESH_BINARY);
	vector<vector<Point>> contours2;
	vector<Vec4i> hierarchy2;
	findContours(src_gray, contours2, hierarchy2, RETR_TREE, CHAIN_APPROX_NONE);

	for (size_t i = 0; i < contours2.size(); i++)
	{
		// Shape matching: smaller distance means more similar.
		double matchRate = matchShapes(contours[0], contours2[i], CONTOURS_MATCH_I1, 0.0);
		if (matchRate <= 0.1)
		{
			drawContours(srcImg2, contours2, static_cast<int>(i), Scalar(0, 255, 0), 2, 8);
		}
	}
	imwrite("dst.jpg", srcImg2);
	return 0;
}
3. Template Matching Test:
// Globals shared between main() and the trackbar callback MatchingMethod().
bool use_mask;                 // set to true when a mask image is supplied via argv[3]
Mat img; Mat templ; Mat mask; Mat result;  // source image, template, optional mask, score map
const char* image_window = "Source Image";   // window showing the source with the match rectangle
const char* result_window = "Result window"; // window showing the normalized score map
int match_method;              // current comparison method, driven by the trackbar
int max_Trackbar = 5;          // highest method index selectable on the trackbar
/// Function Headers
void MatchingMethod(int, void*);  // trackbar callback: runs the match and redraws both windows
/**
 * Template-matching demo entry point.
 *
 * Usage: prog [image] [template] [mask]
 * Falls back to "2.jpg" / "tp.jpg" when paths are not given on the command
 * line. (The original code read the mask from argv[3] yet ignored argv[1] and
 * argv[2], hard-coding the image paths — fixed here for consistency.)
 */
int main(int argc, char** argv)
{
	// Honor command-line paths, keeping the tutorial defaults as fallbacks.
	const char* img_path = (argc > 1) ? argv[1] : "2.jpg";
	const char* templ_path = (argc > 2) ? argv[2] : "tp.jpg";
	img = imread(img_path, IMREAD_COLOR);
	templ = imread(templ_path, IMREAD_COLOR);
	if (argc > 3)
	{
		use_mask = true;
		mask = imread(argv[3], IMREAD_COLOR);
	}
	if (img.empty() || templ.empty() || (use_mask && mask.empty()))
	{
		cout << "Can't read one of the images" << endl;
		return -1;
	}

	/// Create windows
	namedWindow(image_window, WINDOW_AUTOSIZE);
	namedWindow(result_window, WINDOW_AUTOSIZE);

	/// Create Trackbar to switch between comparison methods interactively
	const char* trackbar_label = "Method: \n 0: SQDIFF \n 1: SQDIFF NORMED \n 2: TM CCORR \n 3: TM CCORR NORMED \n 4: TM COEFF \n 5: TM COEFF NORMED";
	createTrackbar(trackbar_label, image_window, &match_method, max_Trackbar, MatchingMethod);

	// Run once with the initial method so the windows are populated.
	MatchingMethod(0, 0);

	waitKey(0);
	return 0;
}
void MatchingMethod(int, void*)
{
//! [copy_source]
/// Source image to display
Mat img_display;
img.copyTo(img_display);
/// Create the result matrix
int result_cols = img.cols - templ.cols + 1;
int result_rows = img.rows - templ.rows + 1;
result.create(result_rows, result_cols, CV_32FC1);
/// Do the Matching and Normalize
bool method_accepts_mask = (CV_TM_SQDIFF == match_method || match_method == CV_TM_CCORR_NORMED);
if (use_mask && method_accepts_mask)
{
matchTemplate(img, templ, result, match_method, mask);
}
else
{
matchTemplate(img, templ, result, match_method);
}
normalize(result, result, 0, 1, NORM_MINMAX, -1, Mat());
double minVal; double maxVal; Point minLoc; Point maxLoc;
Point matchLoc;
minMaxLoc(result, &minVal, &maxVal, &minLoc, &maxLoc, Mat());
/// For SQDIFF and SQDIFF_NORMED, the best matches are lower values. For all the other methods, the higher the better
if (match_method == TM_SQDIFF || match_method == TM_SQDIFF_NORMED)
{
matchLoc = minLoc;
}
else
{
matchLoc = maxLoc;
}
rectangle(img_display, matchLoc, Point(matchLoc.x + templ.cols, matchLoc.y + templ.rows), Scalar::all(0), 2, 8, 0);
rectangle(result, matchLoc, Point(matchLoc.x + templ.cols, matchLoc.y + templ.rows), Scalar::all(0), 2, 8, 0);
imshow(image_window, img_display);
imshow(result_window, result);
return;
}
For more, follow the WeChat official account: ML_Study