Visual Odometry 01

ORB Features

The concept is described in detail elsewhere on the web, so here is only a brief summary:

  • An improved FAST detector is used: on top of plain FAST extraction, an image pyramid is added so that keypoints are detected at multiple scales
  • An ID3 decision tree is used to select the optimal corner points
  • Non-maximum suppression removes keypoints in locally over-dense clusters
  • Scale invariance comes from the pyramid; rotation invariance comes from the angle of the image intensity centroid (sketched below)
  • BRIEF is used as the descriptor

Overall, the algorithm is even faster than SURF, and its accuracy is also quite good.
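To make the rotation-invariance point concrete, here is a minimal sketch (not OpenCV's internal implementation) of the intensity-centroid idea: the moments m10 and m01 of a circular patch around the keypoint give a centroid, and the angle of the vector from the patch center to that centroid becomes the keypoint orientation. The function name and the patch handling are illustrative assumptions.

#include <opencv2/core.hpp>
#include <cmath>

// Orientation (in radians) of the patch centered at (cx, cy) in a grayscale
// image, computed from the intensity centroid as described in the ORB paper.
double intensityCentroidAngle(const cv::Mat &gray, int cx, int cy, int radius)
{
	double m10 = 0.0, m01 = 0.0; // moments sum(x*I) and sum(y*I), relative to the center
	for (int dy = -radius; dy <= radius; ++dy)
	{
		for (int dx = -radius; dx <= radius; ++dx)
		{
			if (dx * dx + dy * dy > radius * radius)
				continue; // keep the patch circular
			int x = cx + dx, y = cy + dy;
			if (x < 0 || y < 0 || x >= gray.cols || y >= gray.rows)
				continue; // skip pixels outside the image
			double intensity = gray.at<uchar>(y, x);
			m10 += dx * intensity;
			m01 += dy * intensity;
		}
	}
	// The vector from the patch center to the intensity centroid defines the orientation.
	return std::atan2(m01, m10);
}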

ORB feature extraction and tracking code

The main body of the code comes from Gao Xiang's book "14 Lectures on Visual SLAM"; camera-capture code was added at the beginning to replace the fixed demo images. A two-camera (stereo) setup is used for matching here, although in practice a single camera is enough for visual odometry.

#include <opencv2/core.hpp>
#include <opencv2/highgui.hpp>
#include <opencv2/videoio.hpp>
#include <iostream>
#include "opencv2/features2d/features2d.hpp"
#include <vector>
#include <time.h>

using namespace cv;
using namespace std;

int main()
{
	VideoCapture cap1;
	VideoCapture cap2;
	cap1.open(1);//white camera
	cap2.open(2);//black camera
	if (!cap1.isOpened()||!cap2.isOpened())
	{
		return -1;
	}
	//Reducing the capture from 640*480 to 320*240 cuts the per-frame time from 200 ms to 50 ms
	cap1.set(CAP_PROP_FRAME_WIDTH, 320);
	cap1.set(CAP_PROP_FRAME_HEIGHT, 240);
	cap2.set(CAP_PROP_FRAME_WIDTH, 320);
	cap2.set(CAP_PROP_FRAME_HEIGHT, 240);
	//namedWindow("Video", 1);
	//namedWindow("Video", 2);
	//namedWindow("pts", 3);
	//Mat frame;
	
	Mat img_1;
	Mat img_2;
	while (1)
	{
		cap1 >> img_1;
		cap2 >> img_2;
		if (!img_1.data || !img_2.data)
		{
			cout << "error reading images " << endl;
			return -1;
		}
		//initialization
		clock_t startTime, endTime;
		startTime = clock();
		
		Ptr<ORB> orb = ORB::create(500, 1.2F, 8, 31, 0, 2, ORB::HARRIS_SCORE, 31, 20);//all default parameters; could also be created once outside the loop
		vector<KeyPoint> keyPoints_1, keyPoints_2;
		Mat descriptors_1, descriptors_2;

		//detect ORB corner keypoints
		orb->detect(img_1, keyPoints_1);
		orb->detect(img_2, keyPoints_2);

		if (keyPoints_1.size() == 0 || keyPoints_2.size() == 0)
		{
			continue;
		}
		//compute descriptors
		orb->compute(img_1, keyPoints_1, descriptors_1);
		orb->compute(img_2, keyPoints_2, descriptors_2);

		//match keypoints using Hamming distance
		vector<DMatch> matches;
		BFMatcher matcher(NORM_HAMMING);
		matcher.match(descriptors_1, descriptors_2, matches);

		//filter the matches
		double min_dist = matches[0].distance, max_dist = matches[0].distance;

		for (int i = 0; i < descriptors_1.rows; i++)
		{
			double dist = matches[i].distance;
			if (dist < min_dist)
				min_dist = dist;
			if (dist > max_dist)
				max_dist = dist;
		}

		printf("max: %f
", max_dist);
		printf("min: %f
", min_dist);

		//A match is considered wrong when the descriptor distance exceeds twice the minimum distance. Since the minimum distance can sometimes be very small, an empirical value of 30 is used as a lower bound.
		std::vector< DMatch > good_matches;
		for (int i = 0; i < descriptors_1.rows; i++)
		{
			if (matches[i].distance <= max(2 * min_dist, 30.0))
			{
				good_matches.push_back(matches[i]);
			}
		}
		endTime = clock();
		cout << "Totle Time : " << (double)(endTime - startTime) / CLOCKS_PER_SEC << "s" << endl;
		printf("goodmatches number:%d
", good_matches.size());
		//-- Step 5: draw the matching results
		/*Mat img_match;
		Mat img_goodmatch;
		drawMatches(img_1, keyPoints_1, img_2, keyPoints_2, matches, img_match);
		drawMatches(img_1, keyPoints_1, img_2, keyPoints_2, good_matches, img_goodmatch);
		imshow("所有匹配点对", img_match);
		imshow("优化后匹配点对", img_goodmatch);
		waitKey(1);*/
	}
	cap1.release();
	cap2.release();
	return 0;
}
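One note on the filtering step: the 2 * min_dist threshold depends on whichever match happens to be best in the current frame, so it can behave differently from frame to frame. A common alternative, not used in the original code, is Lowe's ratio test over the two nearest neighbors returned by knnMatch. Below is a minimal sketch under that assumption; the function name ratioTestFilter and the 0.7 ratio are illustrative choices, not tuned values.

#include <opencv2/core.hpp>
#include <opencv2/features2d.hpp>
#include <vector>

// Keep a match only when its best distance is clearly smaller than the
// second-best distance (Lowe's ratio test).
std::vector<cv::DMatch> ratioTestFilter(const cv::Mat &descriptors_1,
                                        const cv::Mat &descriptors_2,
                                        float ratio = 0.7f)
{
	cv::BFMatcher matcher(cv::NORM_HAMMING);
	std::vector<std::vector<cv::DMatch>> knn_matches;
	matcher.knnMatch(descriptors_1, descriptors_2, knn_matches, 2); // 2 nearest neighbors per query

	std::vector<cv::DMatch> good;
	for (const auto &m : knn_matches)
	{
		if (m.size() == 2 && m[0].distance < ratio * m[1].distance)
			good.push_back(m[0]);
	}
	return good;
}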

Test results

Original post: https://www.cnblogs.com/RegressionWorldLine/p/7471723.html