相机位姿和相机外参定义以及它们的关系(画出相机位置)

最近研究相机,看了一些相机位姿和相机外参的资料,但总是把世界坐标系和相机坐标系记混,也不理解相机位姿和相机外参之间的关系,在实际应用中发现了很多问题。所以这几天结合实际应用,重新对相机坐标系和世界坐标系做了讨论和实践,然后整理了一下。
看了一下《视觉SLAM》,
在这里插入图片描述
画出相机在世界坐标系的位置和它的朝向代码(z轴就是朝向)它保存为obj文件如下:

// Visualize camera poses by writing their three local axes as colored point
// rays into a Wavefront OBJ file (x = red, y = green, z = blue; the z axis is
// the viewing direction). Each pose is T_wc (camera-to-world): the rotation
// columns are the camera axes expressed in world coordinates and the
// translation is the camera center in the world frame.
//
// @param cams_pose  camera-to-world 4x4 poses, one per camera
// @param path       output OBJ file path ("v x y z r g b" records; MeshLab
//                   reads the trailing color triple)
void SaveCamPoseIntoOBJ(const std::vector<Eigen::Matrix4f>& cams_pose, const std::string& path)
	{
		std::ofstream fh(path);
		if (!fh.is_open())
		{
			// Cannot create the output file; nothing to do.
			return;
		}
		const int step_size = 1000;   // samples per axis ray
		const float step = 0.05f;     // spacing between consecutive samples

		for (std::size_t c = 0; c < cams_pose.size(); ++c)
		{
			Eigen::Matrix3f R_wc = cams_pose[c].topLeftCorner(3, 3);
			Eigen::Vector3f T_wc = cams_pose[c].topRightCorner(3, 1);

			//-------------------- the three axes --------------------//
			Eigen::Vector3f local_x = R_wc.col(0); // camera x axis in world coords
			Eigen::Vector3f local_y = R_wc.col(1); // camera y axis in world coords
			Eigen::Vector3f local_z = R_wc.col(2); // camera z axis (viewing direction)

			// Sample points along each axis, starting at the camera center,
			// and emit them directly — one x/y/z triple per step, which keeps
			// the same vertex order the two-pass version produced.
			for (int i = 0; i < step_size; ++i)
			{
				const float d = i * step;
				const Eigen::Vector3f px = T_wc + d * local_x;
				const Eigen::Vector3f py = T_wc + d * local_y;
				const Eigen::Vector3f pz = T_wc + d * local_z;
				fh << "v " << px(0) << " " << px(1) << " " << px(2) <<
					" " << 255 << " " << 0 << " " << 0 << std::endl;
				fh << "v " << py(0) << " " << py(1) << " " << py(2) <<
					" " << 0 << " " << 255 << " " << 0 << std::endl;
				fh << "v " << pz(0) << " " << pz(1) << " " << pz(2) <<
					" " << 0 << " " << 0 << " " << 255 << std::endl;
			}
		}
		fh.close();
	}

上面的代码使用点云画出相机三根轴的方向,点数多、文件大,可能不好用;也可以改用线段的方式表达相机的三根轴。同样保存为 obj 文件,显示效果和上面一样。

void SaveAllCamsPoseIntoObj(const std::string& path, std::vector<Eigen::Matrix4d> cmps,  float aixlen)
    {
    
    
        for(std::size_t i = 0; i < cmps.size(); ++i)
        {
    
    
            //x,y,z aix
            Eigen::Vector3d lx = cmps[i].block<3,1>(0,0);
            Eigen::Vector3d ly = cmps[i].block<3,1>(0,1);
            Eigen::Vector3d lz = cmps[i].block<3,1>(0,2);
            Eigen::Vector3d t = cmps[i].block<3,1>(0,3);
            lx.normalize();
            ly.normalize();
            lz.normalize();
            Eigen::Vector3d ex = t+aixlen*lx;
            Eigen::Vector3d ey = t+aixlen*ly;
            Eigen::Vector3d ez = t+aixlen*lz;
            fh << "v " << t[0] << " " << t[1] << " " << t[2]
                << " " << 255 << " " << 255 << " " << 255 << std::endl;
            fh << "v " << ex[0] << " " << ex[1] << " " << ex[2] 
                << " " << 255 << " " << 0 << " " << 0 << std::endl;
            fh << "v " << ey[0] << " " << ey[1] << " " << ey[2] 
                << " " << 0 << " " << 255 << " " << 0 << std::endl;
            fh << "v " << ez[0] << " " << ez[1] << " " << ez[2] 
                << " " << 0 << " " << 0 << " " << 255 << std::endl;
        }
        for(std::size_t i = 0 ; i < cmps.size(); ++i)
        {
    
    
            fh << "l " << 4*i+1 << " " << 4*i+2 << std::endl;
            fh << "l " << 4*i+1 << " " << 4*i+3 << std::endl;
            fh << "l " << 4*i+1 << " " << 4*i+4 << std::endl;
        }
        fh.close();
    }

保存为obj文件,使用meshlab文件打开即可
得到的如下:
在这里插入图片描述
2:下面是相机参数定义:
在这里插入图片描述
3:相机外参和相机位姿的关系:

在这里插入图片描述
4:实践中的投影(从三维点投影到二维图像上)应用和画出相机(调试):

在这里插入图片描述

猜你喜欢

转载自blog.csdn.net/weixin_43851636/article/details/109723549#comments_24537239