0.前言
局部优化作为VSLAM当中常用的策略,其作用相当于激光SLAM中的局部地图的ICP or NDT优化(scan2localmap)。如下图所示,在VIO当中,随着时间的推移,路标特征点(landmark)和相机的位姿pose越来越多,BA的计算量随着变量的增加而增加,即使BA的H矩阵是稀疏的,也吃不消。因此,我们要限制优化变量的多少,不能只一味的增加待优化的变量到BA里,而应该去掉一些变量。那么如何丢变量就成了一个很重要的问题!在vins mono和vins fusion中都存在void Estimator::optimization()
函数,用来在滑窗内执行Ceres优化与边缘化,并更新滑窗内图像帧的状态(位姿、速度、偏置、外参、逆深度、相机与IMU时差)。但之前在《VINS-FUSION 前端后端代码全详解》中忽略了这一重要部分:写完回环部分后才发现,关注了全局优化的同时,局部优化却没有被关注。为此写这一篇文章来分析VINS中的局部优化部分。
1. VIO中的状态向量与代价函数
视觉惯性BA:这三项依次为边缘化的先验信息、IMU的测量残差、视觉的重投影误差
BA优化模型分为三部分:
1、视觉误差函数部分(滑动窗口中特征点在相机下视觉重投影残差)
2、IMU残差部分(滑动窗口中相邻帧间的IMU产生)
3、Marg边缘化残差部分(滑动窗口中去掉位姿和特征点约束)代码中使用Google开源的Ceres solver解决。
2. 视觉约束
这部分要拟合的目标可以通过重投影误差约束,求解的是对同一个路标点的观测值和估计值之间的误差,注意是在归一化平面上表示。
当某路标点在第i帧观测到并进行初始化操作得到路标点逆深度,当其在第j帧也被观测到时,估计其在第j帧中的坐标为:
此时的视觉残差为:(左侧为根据i帧反推估计的位置,右侧为观测值)
逆深度作为参数原因:
1)观测到的特征点深度可能非常大,难以进行优化;
2)可以减少实际优化的参数变量;
3)逆深度更加服从高斯分布。这里特征点的逆深度在第i帧初始化操作中得到。
我们可以看到在vins fusion中
//左相机在i时刻和j时刻分别观测到路标点
ProjectionTwoFrameOneCamFactor *f_td = new ProjectionTwoFrameOneCamFactor(pts_i, pts_j, it_per_id.feature_per_frame[0].velocity, it_per_frame.velocity,
it_per_id.feature_per_frame[0].cur_td, it_per_frame.cur_td);
problem.AddResidualBlock(f_td, loss_function, para_Pose[imu_i], para_Pose[imu_j], para_Ex_Pose[0], para_Feature[feature_index], para_Td[0]);
}
if(STEREO && it_per_frame.is_stereo)
{
Vector3d pts_j_right = it_per_frame.pointRight;
if(imu_i != imu_j)
{ //左相机在i时刻、右相机在j时刻分别观测到路标点
ProjectionTwoFrameTwoCamFactor *f = new ProjectionTwoFrameTwoCamFactor(pts_i, pts_j_right, it_per_id.feature_per_frame[0].velocity, it_per_frame.velocityRight,
it_per_id.feature_per_frame[0].cur_td, it_per_frame.cur_td);
problem.AddResidualBlock(f, loss_function, para_Pose[imu_i], para_Pose[imu_j], para_Ex_Pose[0], para_Ex_Pose[1], para_Feature[feature_index], para_Td[0]);
}
else
{ //左相机和右相机在i时刻分别观测到路标点
ProjectionOneFrameTwoCamFactor *f = new ProjectionOneFrameTwoCamFactor(pts_i, pts_j_right, it_per_id.feature_per_frame[0].velocity, it_per_frame.velocityRight,
it_per_id.feature_per_frame[0].cur_td, it_per_frame.cur_td);
problem.AddResidualBlock(f, loss_function, para_Ex_Pose[0], para_Ex_Pose[1], para_Feature[feature_index], para_Td[0]);
}
}
目前我们将以ProjectionTwoFrameOneCamFactor
为例子介绍特征点逆深度求解
/**
* 迭代优化每一步调用,计算变量x在当前状态下的残差,以及残差对变量的Jacobian,用于计算delta_x,更新变量x
* 优化重投影误差
* 1、优化变量:前一帧位姿,当前帧位姿,相机与IMU外参,特征点逆深度,相机与IMU时差。对匹配点构建重投影误差
* 2、计算残差对优化变量的Jacobian
* @param parameters 优化变量的值
* @param residuals output 残差
* @param jacobians output 残差对优化变量的Jacobian
*/
bool ProjectionTwoFrameOneCamFactor::Evaluate(double const *const *parameters, double *residuals, double **jacobians) const
{
// 优化变量:前一帧位姿7,当前位姿7,相机与IMU外参7,特征点逆深度1,相机与IMU时差1
TicToc tic_toc;
Eigen::Vector3d Pi(parameters[0][0], parameters[0][1], parameters[0][2]);
Eigen::Quaterniond Qi(parameters[0][6], parameters[0][3], parameters[0][4], parameters[0][5]);
Eigen::Vector3d Pj(parameters[1][0], parameters[1][1], parameters[1][2]);
Eigen::Quaterniond Qj(parameters[1][6], parameters[1][3], parameters[1][4], parameters[1][5]);
Eigen::Vector3d tic(parameters[2][0], parameters[2][1], parameters[2][2]);
Eigen::Quaterniond qic(parameters[2][6], parameters[2][3], parameters[2][4], parameters[2][5]);
double inv_dep_i = parameters[3][0];
double td = parameters[4][0];
Eigen::Vector3d pts_i_td, pts_j_td;
// 匹配点ij, 归一化相机点时差校正,对应到采集时刻的位置,因为IMU数据是对应图像采集时刻的
pts_i_td = pts_i - (td - td_i) * velocity_i;
pts_j_td = pts_j - (td - td_j) * velocity_j;
// 转到相机系
Eigen::Vector3d pts_camera_i = pts_i_td / inv_dep_i;
// 转到IMU系
Eigen::Vector3d pts_imu_i = qic * pts_camera_i + tic;
// 转到世界系
Eigen::Vector3d pts_w = Qi * pts_imu_i + Pi;
// 转到j的IMU系
Eigen::Vector3d pts_imu_j = Qj.inverse() * (pts_w - Pj);
// 转到j的相机系
Eigen::Vector3d pts_camera_j = qic.inverse() * (pts_imu_j - tic);
Eigen::Map<Eigen::Vector2d> residual(residuals);
#ifdef UNIT_SPHERE_ERROR
residual = tangent_base * (pts_camera_j.normalized() - pts_j_td.normalized());
#else
// 计算归一化相机平面上的两点误差
double dep_j = pts_camera_j.z();
residual = (pts_camera_j / dep_j).head<2>() - pts_j_td.head<2>();
#endif
residual = sqrt_info * residual;
if (jacobians)
{
Eigen::Matrix3d Ri = Qi.toRotationMatrix();
Eigen::Matrix3d Rj = Qj.toRotationMatrix();
Eigen::Matrix3d ric = qic.toRotationMatrix();
Eigen::Matrix<double, 2, 3> reduce(2, 3);
#ifdef UNIT_SPHERE_ERROR
double norm = pts_camera_j.norm();
Eigen::Matrix3d norm_jaco;
double x1, x2, x3;
x1 = pts_camera_j(0);
x2 = pts_camera_j(1);
x3 = pts_camera_j(2);
norm_jaco << 1.0 / norm - x1 * x1 / pow(norm, 3), - x1 * x2 / pow(norm, 3), - x1 * x3 / pow(norm, 3),
- x1 * x2 / pow(norm, 3), 1.0 / norm - x2 * x2 / pow(norm, 3), - x2 * x3 / pow(norm, 3),
- x1 * x3 / pow(norm, 3), - x2 * x3 / pow(norm, 3), 1.0 / norm - x3 * x3 / pow(norm, 3);
reduce = tangent_base * norm_jaco;
#else
reduce << 1. / dep_j, 0, -pts_camera_j(0) / (dep_j * dep_j),
0, 1. / dep_j, -pts_camera_j(1) / (dep_j * dep_j);
#endif
reduce = sqrt_info * reduce;
// 下面是计算残差对优化变量的Jacobian
if (jacobians[0])
{
Eigen::Map<Eigen::Matrix<double, 2, 7, Eigen::RowMajor>> jacobian_pose_i(jacobians[0]);
Eigen::Matrix<double, 3, 6> jaco_i;
jaco_i.leftCols<3>() = ric.transpose() * Rj.transpose();
jaco_i.rightCols<3>() = ric.transpose() * Rj.transpose() * Ri * -Utility::skewSymmetric(pts_imu_i);
jacobian_pose_i.leftCols<6>() = reduce * jaco_i;
jacobian_pose_i.rightCols<1>().setZero();
}
if (jacobians[1])
{
Eigen::Map<Eigen::Matrix<double, 2, 7, Eigen::RowMajor>> jacobian_pose_j(jacobians[1]);
Eigen::Matrix<double, 3, 6> jaco_j;
jaco_j.leftCols<3>() = ric.transpose() * -Rj.transpose();
jaco_j.rightCols<3>() = ric.transpose() * Utility::skewSymmetric(pts_imu_j);
jacobian_pose_j.leftCols<6>() = reduce * jaco_j;
jacobian_pose_j.rightCols<1>().setZero();
}
if (jacobians[2])
{
Eigen::Map<Eigen::Matrix<double, 2, 7, Eigen::RowMajor>> jacobian_ex_pose(jacobians[2]);
Eigen::Matrix<double, 3, 6> jaco_ex;
jaco_ex.leftCols<3>() = ric.transpose() * (Rj.transpose() * Ri - Eigen::Matrix3d::Identity());
Eigen::Matrix3d tmp_r = ric.transpose() * Rj.transpose() * Ri * ric;
jaco_ex.rightCols<3>() = -tmp_r * Utility::skewSymmetric(pts_camera_i) + Utility::skewSymmetric(tmp_r * pts_camera_i) +
Utility::skewSymmetric(ric.transpose() * (Rj.transpose() * (Ri * tic + Pi - Pj) - tic));
jacobian_ex_pose.leftCols<6>() = reduce * jaco_ex;
jacobian_ex_pose.rightCols<1>().setZero();
}
if (jacobians[3])
{
Eigen::Map<Eigen::Vector2d> jacobian_feature(jacobians[3]);
jacobian_feature = reduce * ric.transpose() * Rj.transpose() * Ri * ric * pts_i_td * -1.0 / (inv_dep_i * inv_dep_i);
}
if (jacobians[4])
{
Eigen::Map<Eigen::Vector2d> jacobian_td(jacobians[4]);
jacobian_td = reduce * ric.transpose() * Rj.transpose() * Ri * ric * velocity_i / inv_dep_i * -1.0 +
sqrt_info * velocity_j.head(2);
}
}
sum_t += tic_toc.toc();
return true;
}
3. IMU约束
优化变量:
IMU的第i、j时刻下的p位置,v速度,Q旋转(PVQ),两个偏置ba,bw
这一块可以看一下之前写的文章
// (2) 添加IMU残差
if(USE_IMU)
{
for (int i = 0; i < frame_count; i++)
{
int j = i + 1;
if (pre_integrations[j]->sum_dt > 10.0)
continue;
// 前后帧之间建立IMU残差
IMUFactor* imu_factor = new IMUFactor(pre_integrations[j]);
// 后面四个参数为变量初始值,优化过程中会更新
problem.AddResidualBlock(imu_factor, NULL, para_Pose[i], para_SpeedBias[i], para_Pose[j], para_SpeedBias[j]);
}
}
4. 基于舒尔补的边缘化
这一部分本质上就是通过舒尔补(Schur complement)来大幅降低计算量的操作。这一块网上已有非常详细的解释;作者目前没有深入阅读相关代码,仅作概念层面的了解。
如果仅仅从前后两帧图像计算相机位姿,速度快但是精度低;但是采用全局优化BA,连接所有图像帧,精度高但是复杂度高。
采用滑动窗,固定数量的帧进行优化,这样能够保证精度和速度。既然是滑动窗,在滑动的过程中会有新的图像进来,旧的图像离开,所谓边缘化就是为了删除图像,但是把图像信息约束保留下来
为了防止pose和特征的个数的复杂度随着时间不断增长,引入边缘化,在移除位姿时将关联的约束转化为先验放入优化问题中。
为了限制基于优化的VIO计算复杂度,引入边缘化。有选择地从滑动窗口中将IMU状态xK和特征λ1边缘化,同时将对应于边缘状态的测量值转换为先验。
分为两种情况,
- 一种是倒数第二帧是关键帧的话,将最旧的pose移出Sliding Window,把最旧帧关联的视觉和惯性数据边缘化掉,即第一个老关键帧及其测量值被边缘化;
- 另一种是倒数第二帧不是关键帧的话,就只剔除倒数第二帧的视觉观测,而不剔除它的IMU约束。原因是这样的边缘化策略保证关键帧之间有足够视差、能够三角化足够多的地图点,并且保证了IMU预积分的连贯性。
/** * Step4. 边缘化操作 */ // 以下是边缘化操作 TicToc t_whole_marginalization; // Marg最早帧 if (marginalization_flag == MARGIN_OLD) { MarginalizationInfo *marginalization_info = new MarginalizationInfo(); // 滑窗中的帧位姿、速度、偏置、外参、特征点逆深度等参数,转换成数组 vector2double(); // 先验残差 if (last_marginalization_info && last_marginalization_info->valid) { vector<int> drop_set; // 上一次Marg剩下的参数块 for (int i = 0; i < static_cast<int>(last_marginalization_parameter_blocks.size()); i++) { if (last_marginalization_parameter_blocks[i] == para_Pose[0] || last_marginalization_parameter_blocks[i] == para_SpeedBias[0]) drop_set.push_back(i); } // construct new marginlization_factor MarginalizationFactor *marginalization_factor = new MarginalizationFactor(last_marginalization_info); ResidualBlockInfo *residual_block_info = new ResidualBlockInfo(marginalization_factor, NULL, last_marginalization_parameter_blocks, drop_set); marginalization_info->addResidualBlockInfo(residual_block_info); } // 滑窗首帧与后一帧之间的IMU残差 if(USE_IMU) { if (pre_integrations[1]->sum_dt < 10.0) { IMUFactor* imu_factor = new IMUFactor(pre_integrations[1]); ResidualBlockInfo *residual_block_info = new ResidualBlockInfo(imu_factor, NULL, vector<double *>{para_Pose[0], para_SpeedBias[0], para_Pose[1], para_SpeedBias[1]}, vector<int>{0, 1}); marginalization_info->addResidualBlockInfo(residual_block_info); } } // 滑窗首帧与其他帧之间的视觉重投影残差 { // 遍历特征点 int feature_index = -1; for (auto &it_per_id : f_manager.feature) { it_per_id.used_num = it_per_id.feature_per_frame.size(); if (it_per_id.used_num < 4) continue; ++feature_index; int imu_i = it_per_id.start_frame, imu_j = imu_i - 1; if (imu_i != 0) continue; // 首帧观测帧归一化相机平面点 Vector3d pts_i = it_per_id.feature_per_frame[0].point; // 遍历观测帧 for (auto &it_per_frame : it_per_id.feature_per_frame) { imu_j++; // 非首个观测帧 if(imu_i != imu_j) { Vector3d pts_j = it_per_frame.point; ProjectionTwoFrameOneCamFactor *f_td = new ProjectionTwoFrameOneCamFactor(pts_i, pts_j, it_per_id.feature_per_frame[0].velocity, 
it_per_frame.velocity, it_per_id.feature_per_frame[0].cur_td, it_per_frame.cur_td); ResidualBlockInfo *residual_block_info = new ResidualBlockInfo(f_td, loss_function, vector<double *>{para_Pose[imu_i], para_Pose[imu_j], para_Ex_Pose[0], para_Feature[feature_index], para_Td[0]}, vector<int>{0, 3}); marginalization_info->addResidualBlockInfo(residual_block_info); } if(STEREO && it_per_frame.is_stereo) { Vector3d pts_j_right = it_per_frame.pointRight; if(imu_i != imu_j) { ProjectionTwoFrameTwoCamFactor *f = new ProjectionTwoFrameTwoCamFactor(pts_i, pts_j_right, it_per_id.feature_per_frame[0].velocity, it_per_frame.velocityRight, it_per_id.feature_per_frame[0].cur_td, it_per_frame.cur_td); ResidualBlockInfo *residual_block_info = new ResidualBlockInfo(f, loss_function, vector<double *>{para_Pose[imu_i], para_Pose[imu_j], para_Ex_Pose[0], para_Ex_Pose[1], para_Feature[feature_index], para_Td[0]}, vector<int>{0, 4}); marginalization_info->addResidualBlockInfo(residual_block_info); } else { ProjectionOneFrameTwoCamFactor *f = new ProjectionOneFrameTwoCamFactor(pts_i, pts_j_right, it_per_id.feature_per_frame[0].velocity, it_per_frame.velocityRight, it_per_id.feature_per_frame[0].cur_td, it_per_frame.cur_td); ResidualBlockInfo *residual_block_info = new ResidualBlockInfo(f, loss_function, vector<double *>{para_Ex_Pose[0], para_Ex_Pose[1], para_Feature[feature_index], para_Td[0]}, vector<int>{2}); marginalization_info->addResidualBlockInfo(residual_block_info); } } } } } TicToc t_pre_margin; marginalization_info->preMarginalize(); ROS_DEBUG("pre marginalization %f ms", t_pre_margin.toc()); // 执行marg边缘化 TicToc t_margin; marginalization_info->marginalize(); ROS_DEBUG("marginalization %f ms", t_margin.toc()); // marg首帧之后,将参数数组中每个位置的值设为前面元素的值,记录到addr_shift里面 // [<p1,p0>,<p2,p1>,...] 
std::unordered_map<long, double *> addr_shift; for (int i = 1; i <= WINDOW_SIZE; i++) { addr_shift[reinterpret_cast<long>(para_Pose[i])] = para_Pose[i - 1]; printf("pose %ld,%ld\n",reinterpret_cast<long>(para_Pose[i]),reinterpret_cast<long>(para_Pose[i-1])); if(USE_IMU) { addr_shift[reinterpret_cast<long>(para_SpeedBias[i])] = para_SpeedBias[i - 1]; printf("speedBias %ld,%ld\n",reinterpret_cast<long>(para_SpeedBias[i]),reinterpret_cast<long>(para_SpeedBias[i-1])); } } for (int i = 0; i < NUM_OF_CAM; i++) { addr_shift[reinterpret_cast<long>(para_Ex_Pose[i])] = para_Ex_Pose[i]; printf("exPose %ld,%ld\n",reinterpret_cast<long>(para_Ex_Pose[i]),reinterpret_cast<long>(para_Ex_Pose[i])); } addr_shift[reinterpret_cast<long>(para_Td[0])] = para_Td[0]; printf("td %ld,%ld\n",reinterpret_cast<long>(para_Td[0]),reinterpret_cast<long>(para_Td[0])); vector<double *> parameter_blocks = marginalization_info->getParameterBlocks(addr_shift); if (last_marginalization_info) delete last_marginalization_info; // 保存marg信息 last_marginalization_info = marginalization_info; last_marginalization_parameter_blocks = parameter_blocks; }
VINS中需要边缘化滑动窗口中的最老帧,目的是希望不再计算这一帧的位姿或者与其相关的路标点,但是希望保留该帧对窗口内其余帧的约束关系。我们基于与移除状态相关的所有边缘化测量值构造一个先验。新的先验项被添加到现有的先验项中。
注意:去掉了x1,但是之前和x1相连的所有量x2 x3 x4 x5 在marg掉x1后变得两两相连。
5. 参考链接
https://www.freesion.com/article/71171025418/
https://blog.csdn.net/iwanderu/article/details/104837057/
https://www.cnblogs.com/glxin/p/11990551.html
评论(0)
您还未登录,请登录后发表或查看评论