// ORB feature-descriptor demo: opens the camera and matches each captured frame against a reference image

//---------------------------------[Header file, namespace contains part]------ ----------------------
// Description: Contains header files and namespaces used by the program
//------------------------------------------------------------------------------------------------
#include <opencv2/opencv.hpp>
#include <opencv2/highgui/highgui.hpp>
#include <opencv2/nonfree/features2d.hpp>
#include <opencv2/features2d/features2d.hpp>
using namespace cv;
using namespace std;



//--------------------------------------[main( ) function]---- -------------------------------------
// Description: The entry function of the console application, our program starts from here
//-----------------------------------------------------------------------------------------------
//--------------------------------------[main( ) function]-------------------------------------
// Description: Entry point. Loads a reference image, extracts ORB keypoints and
// descriptors from it, builds a FLANN LSH index (the index type suited to binary
// descriptors such as ORB, searched with Hamming distance), then continuously
// grabs camera frames, matches their ORB descriptors against the reference,
// filters matches with Lowe's ratio test, and displays the result until ESC.
// Returns 0 on normal exit, -1 when the image/camera/features are unavailable.
//-----------------------------------------------------------------------------------------------
int main()
{
	//[0] Load the source (reference) image, display it, and convert it to grayscale
	Mat srcImage = imread("1.jpg");
	if (!srcImage.data)
	{
		cout << "there is no picture" << endl;
		getchar();
		return -1;// was `return false` (== 0), which reported success on the failure path
	}
	imshow("Original Image", srcImage);
	Mat grayImage;
	cvtColor(srcImage, grayImage, CV_BGR2GRAY);

	//-----------------Detect ORB feature points and extract descriptors of the reference image-----------------

	//[1] Parameter definition
	OrbFeatureDetector featureDetector;
	vector<KeyPoint> keyPoints;
	Mat descriptors;

	//[2] Call the detect function to find the feature keypoints, stored in the vector container
	featureDetector.detect(grayImage, keyPoints);

	//[3] Compute the descriptors (feature vectors)
	OrbDescriptorExtractor featureExtractor;
	featureExtractor.compute(grayImage, keyPoints, descriptors);

	// Guard: building an LSH index over an empty descriptor matrix would fail,
	// and the ratio test below needs at least two reference descriptors
	if (descriptors.rows < 2)
	{
		cout << "there is no picture" << endl;
		getchar();
		return -1;
	}

	//[4] FLANN-based descriptor matching: LSH index + Hamming distance for binary ORB descriptors
	flann::Index flannIndex(descriptors, flann::LshIndexParams(12, 20, 2), cvflann::FLANN_DIST_HAMMING);

	//[5] Initialize the video capture object and verify the device actually opened
	VideoCapture cap(0);
	if (!cap.isOpened())
	{
		cout << "cannot open camera" << endl;
		return -1;
	}

	//[6] Poll until the ESC key is pressed to exit the loop
	while (1)
	{
		double time0 = static_cast<double>(getTickCount());// record start time for the FPS printout
		Mat captureImage, captureImage_gray;// frame as captured, and its grayscale conversion
		cap >> captureImage;// capture one video frame
		if (captureImage.empty())// skip frames the device failed to deliver
		{
			cout << "cannot open camera" << endl;
			continue;
		}

		// Convert the captured video frame to grayscale
		cvtColor(captureImage, captureImage_gray, CV_BGR2GRAY);

		//[7] Detect ORB keypoints and extract descriptors in the test frame
		vector<KeyPoint> captureKeyPoints;
		Mat captureDescription;

		//[8] Call the detect function to find the frame's feature keypoints
		featureDetector.detect(captureImage_gray, captureKeyPoints);

		//[9] Compute the frame's descriptors
		featureExtractor.compute(captureImage_gray, captureKeyPoints, captureDescription);

		// Guard: knnSearch on an empty query matrix would crash when the frame
		// has no features; show the raw frame and keep ESC responsive instead
		if (captureDescription.empty())
		{
			imshow("Match window", captureImage);
			if (char(waitKey(1)) == 27) break;
			continue;
		}

		//[10] Match the frame descriptors against the index, keeping the two nearest neighbours
		Mat matchIndex(captureDescription.rows, 2, CV_32SC1), matchDistance(captureDescription.rows, 2, CV_32FC1);
		flannIndex.knnSearch(captureDescription, matchIndex, matchDistance, 2, flann::SearchParams());// K-nearest-neighbour search, K = 2

		//[11] Keep only good matches per Lowe's ratio test: best distance < 0.6 x second-best
		vector<DMatch> goodMatches;
		for (int i = 0; i < matchDistance.rows; i++)
		{
			if (matchDistance.at<float>(i, 0) < 0.6 * matchDistance.at<float>(i, 1))
			{
				// queryIdx = frame keypoint i, trainIdx = matched reference keypoint
				DMatch dmatches(i, matchIndex.at<int>(i, 0), matchDistance.at<float>(i, 0));
				goodMatches.push_back(dmatches);
			}
		}

		//[12] Draw and display the matching window
		Mat resultImage;
		drawMatches(captureImage, captureKeyPoints, srcImage, keyPoints, goodMatches, resultImage);
		imshow("Match window", resultImage);

		//[13] Display the frame rate
		cout << ">帧率= " << getTickFrequency() / (getTickCount() - time0) << endl;

		// Press the ESC key to exit the program
		if (char(waitKey(1)) == 27) break;
	}

	return 0;
}


Guess you like

Origin http://43.154.161.224:23101/article/api/json?id=325572013&siteId=291194637