OpenCV in Practice: The SIFT/SURF Algorithms

Introduction to the SIFT/SURF Interfaces in OpenCV

  Earlier versions of OpenCV let you call SIFT/SURF directly. After OpenCV 2.x (i.e. from 3.x onward), patented algorithms such as SIFT/SURF were moved into the opencv_contrib repository (hosted on GitHub), which has to be downloaded and built manually. The build is done with CMake and Visual Studio; there are plenty of tutorials online covering the exact steps and most of them are fine, so here I will only stress one point to watch out for when downloading opencv_contrib:


  When downloading opencv_contrib, you must check out the tag or branch that matches your OpenCV version, otherwise the build will fail. OK, now let's take a look at the public interface functions of the SIFT/SURF algorithms in OpenCV.

  The SIFT/SURF classes live in nonfree.hpp in the OpenCV source, inside the xfeatures2d namespace. Here is a brief introduction to SIFT's create() factory function:
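The original post shows a header screenshot at this point. As a sketch based on the OpenCV 3.x documentation (the defaults may differ between versions), the SIFT factory function is declared roughly as follows:

static Ptr<SIFT> cv::xfeatures2d::SIFT::create(
    int    nfeatures         = 0,     // keep only the best N keypoints (0 = keep all)
    int    nOctaveLayers     = 3,     // layers per octave of the DoG pyramid
    double contrastThreshold = 0.04,  // reject weak, low-contrast keypoints
    double edgeThreshold     = 10,    // reject edge-like, unstable keypoints
    double sigma             = 1.6);  // Gaussian sigma applied at the base octave

Note that a larger contrastThreshold keeps fewer keypoints, while edgeThreshold works the other way around: the larger it is, the fewer edge responses are filtered out.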

The public interface of SURF is as follows:

  In addition to create(), SURF also exposes individual parameter setters and getters, such as setHessianThreshold() and getHessianThreshold(). Below is a brief explanation of its create() function:
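Again as a sketch taken from the OpenCV 3.x documentation (treat the defaults as indicative), the SURF factory function looks like this:

static Ptr<SURF> cv::xfeatures2d::SURF::create(
    double hessianThreshold = 100,   // discard keypoints whose Hessian response is below this
    int    nOctaves         = 4,     // number of pyramid octaves
    int    nOctaveLayers    = 3,     // layers per octave
    bool   extended         = false, // true: 128-element descriptor, false: 64-element
    bool   upright          = false);// true: skip orientation estimation (faster, not rotation invariant)

The setters and getters mentioned above, such as setHessianThreshold(), simply adjust these same parameters on an already-created detector.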

With the basic SIFT/SURF interfaces covered, let's go straight to the code:

#include <iostream>
#include <opencv2/opencv.hpp>
#include <opencv2/features2d.hpp>
#include <opencv2/xfeatures2d.hpp>
#include <vector>
#include "vfc.h"

using namespace std;
using namespace cv;

int main(void)
{
    // Read the input images as grayscale
    Mat img1 = imread("../opencv_SIFT/image/box.png", 0);
    Mat img2 = imread("../opencv_SIFT/image/box_in_scene.png", 0);
    // SIFT keypoint detection and descriptors, using default parameters
    Ptr<Feature2D> feature2D = xfeatures2d::SIFT::create();
    // SURF keypoint detection and descriptors, using default parameters
    //Ptr<Feature2D> feature2D = xfeatures2d::SURF::create();

    double features_t = (double)getTickCount();
    //-- Step 1: Compute features with kpts 
    vector<KeyPoint> kpts1, kpts2;
    feature2D->detect(img1, kpts1);
    feature2D->detect(img2, kpts2);
    features_t = ((double)getTickCount() - features_t) / getTickFrequency();
    cout << "Features Times (s): " << features_t << " s" << endl;

    double desc_t = (double)getTickCount();
    //-- Step 2: Compute features' descriptors 
    Mat desc1, desc2;
    feature2D->compute(img1, kpts1, desc1);
    feature2D->compute(img2, kpts2, desc2);
    // Alternatively, detectAndCompute() extracts keypoints and descriptors in one call;
    // its last argument (useProvidedKeypoints) set to false means keypoints are detected rather than reused
    //feature2D->detectAndCompute(img1, Mat(), kpts1, desc1, false);
    //feature2D->detectAndCompute(img2, Mat(), kpts2, desc2, false);
    desc_t = ((double)getTickCount() - desc_t) / getTickFrequency();
    cout << "Descriptors Times (s): " << desc_t << " s" << endl;
    ////////////// Local feature matching: brute-force (BF) or FLANN-based ///////////////
    double match_t = (double)getTickCount();
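    // Note: SIFT/SURF descriptors are CV_32F, so FlannBasedMatcher (KD-tree search) can be
    // used on them directly; BFMatcher with the default NORM_L2 is the exhaustive alternative.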
    //BFMatcher matcher;
    FlannBasedMatcher matcher;
    vector<DMatch> matches;

    matcher.match(desc1, desc2, matches);
    match_t = ((double)getTickCount() - match_t) / getTickFrequency();
    cout << "Match Times (s): " << match_t << " s"<< endl;

    Mat showMatch;
    drawMatches(img1, kpts1, img2, kpts2, matches, showMatch, Scalar::all(-1),
                Scalar::all(-1), vector<char>(), DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS);

    namedWindow("RoughMatchWithOutVFC");
    imshow("RoughMatchWithOutVFC", showMatch);
    /////////////////////// Strategy for purifying the matched point pairs ///////////////////////
    //-- Step 4: Remove mismatches by vector field consensus (VFC)
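    // VFC (Vector Field Consensus, Ma et al., "Robust Point Matching via Vector Field Consensus")
    // fits a smooth motion field to the putative matches with an EM procedure and rejects
    // matches that are inconsistent with that field, i.e. likely mismatches.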
    vector<Point2f> X;
    vector<Point2f> Y;
    X.clear();
    Y.clear();
    for (unsigned int i = 0; i < matches.size(); i++) {
        int idx1 = matches[i].queryIdx;
        int idx2 = matches[i].trainIdx;
        X.push_back(kpts1[idx1].pt);
        Y.push_back(kpts2[idx2].pt);
    }
    // VFC process
    double vfc_t = (double)getTickCount();
    VFC myvfc;
    myvfc.setData(X, Y);
    myvfc.optimize();
    vector<int> matchIdx = myvfc.obtainCorrectMatch();
    vfc_t = ((double)getTickCount() - vfc_t) / getTickFrequency();
    cout << "PreciseMatch Times (s): " << vfc_t << " s" << endl;

    vector< DMatch > correctMatches;
    vector<KeyPoint> correctKeypoints_1, correctKeypoints_2;
    correctMatches.clear();
    for (unsigned int i = 0; i < matchIdx.size(); i++) {
        int idx = matchIdx[i];
        correctMatches.push_back(matches[idx]);
        // index the keypoints through the DMatch (idx indexes the match list, not the keypoint lists)
        correctKeypoints_1.push_back(kpts1[matches[idx].queryIdx]);
        correctKeypoints_2.push_back(kpts2[matches[idx].trainIdx]);
    }

    Mat img_correctMatches;
    drawMatches(img1, kpts1, img2, kpts2, correctMatches, img_correctMatches, Scalar::all(-1),
                Scalar::all(-1), vector<char>(), DrawMatchesFlags::NOT_DRAW_SINGLE_POINTS);

    namedWindow("PreciseMatchWithVFC");
    imshow("PreciseMatchWithVFC", img_correctMatches);
    imwrite("../opencv_SIFT/image/match.png", img_correctMatches);
    waitKey(0);

    return 0;
}
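A note on building: this example needs an OpenCV build that includes the opencv_contrib xfeatures2d module. The vfc.h header is not part of OpenCV; it is assumed here to be the publicly released VFC reference implementation that accompanies the paper.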
Experimental Results

SIFT matching result (left) and SURF matching result (right)

References

https://opencv.org/
http://www.cs.ubc.ca/~lowe/

Reposted from blog.csdn.net/Small_Munich/article/details/80149951