added projection into feature observations camera states

commit 010d36d216
parent abd343f576
@@ -35,6 +35,8 @@ struct CAMState {
   // Position of the camera frame in the world frame.
   Eigen::Vector3d position;
 
+  //Illumination illumination;
+
   // These two variables should have the same physical
   // interpretation with `orientation` and `position`.
   // These two variables are used to modify the measurement
@@ -146,16 +146,46 @@ struct Feature {
   inline bool initializePosition(
       const CamStateServer& cam_states);
 
+
+  inline cv::Point2f projectPositionToCamera(
+      const CAMState& cam_state,
+      const StateIDType& cam_state_id,
+      const cv::Vec4d& intrinsics,
+      const std::string& distortion_model,
+      const cv::Vec4d& distortion_coeffs,
+      Eigen::Vector3d& in_p) const;
+
+  /*
+   * @brief IrradianceAnchorPatch_Camera returns irradiance values
+   * of the Anchor Patch position in a camera frame
+   *
+   */
+
+  bool IrradianceOfAnchorPatch(
+      const CAMState& cam_state,
+      const StateIDType& cam_state_id,
+      const cv::Vec4d& intrinsics,
+      const std::string& distortion_model,
+      const cv::Vec4d& distortion_coeffs,
+      const movingWindow& cam0_moving_window,
+      std::vector<uint8_t>& anchorPatch_measurement) const;
+
   /*
    * @brief projectPixelToPosition uses the calculated pixels
    * of the anchor patch to generate 3D positions of all of them
   */
-  bool projectPixelToPosition(cv::Point2f in_p,
+  inline bool projectPixelToPosition(cv::Point2f in_p,
       Eigen::Vector3d& out_p,
       const cv::Vec4d& intrinsics,
       const std::string& distortion_model,
       const cv::Vec4d& distortion_coeffs);
 
+  /*
+   * @brief Irradiance returns irradiance value of a pixel
+   */
+
+  inline uint8_t Irradiance(cv::Point2f pose, cv::Mat image) const;
+
   // A unique identifier for the feature.
   // In case of long time running, the variable
   // type of id is set to FeatureIDType in order
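For orientation, the declarations above compose into a photometric measurement of the anchor patch: every 3D patch point is projected into a given camera state and the image intensity is sampled there. In notation not taken from the commit (p_j denotes an entry of anchorPatch_3d, I_k the cam0 image stored in the moving window for camera state k, and pi(X, Y, Z) = (X/Z, Y/Z) the normalized pinhole projection), IrradianceOfAnchorPatch collects

    h_j^k = I_k( pi( R_w^ck * (p_j - t_ck_w) ) )

for all patch points j into anchorPatch_measurement. As implemented further below, the sample point is the undistorted normalized projection; the distorted pixel coordinates are computed only for debug output.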
@@ -329,6 +359,61 @@ bool Feature::checkMotion(
   else return false;
 }
 
+bool Feature::IrradianceOfAnchorPatch(
+    const CAMState& cam_state,
+    const StateIDType& cam_state_id,
+    const cv::Vec4d& intrinsics,
+    const std::string& distortion_model,
+    const cv::Vec4d& distortion_coeffs,
+    const movingWindow& cam0_moving_window,
+    std::vector<uint8_t>& anchorPatch_measurement) const
+{
+  // project every point in anchorPatch_3d.
+  for (auto point : anchorPatch_3d)
+  {
+    cv::Point2f p_in_c0 = projectPositionToCamera(cam_state, cam_state_id, intrinsics, distortion_model, distortion_coeffs, point);
+    uint8_t irradiance = Irradiance(p_in_c0, cam0_moving_window.find(cam_state_id)->second);
+    anchorPatch_measurement.push_back(irradiance);
+  }
+  return true;
+}
+
+uint8_t Feature::Irradiance(cv::Point2f pose, cv::Mat image) const
+{
+  return image.at<uint8_t>((int)pose.y, (int)pose.x);  // cv::Mat::at expects (row, col), i.e. (y, x)
+}
+
+cv::Point2f Feature::projectPositionToCamera(
+    const CAMState& cam_state,
+    const StateIDType& cam_state_id,
+    const cv::Vec4d& intrinsics,
+    const std::string& distortion_model,
+    const cv::Vec4d& distortion_coeffs,
+    Eigen::Vector3d& in_p) const
+{
+  Eigen::Isometry3d T_c0_w;
+
+  cv::Point2f out_p;
+
+  // transform position to camera frame
+  Eigen::Matrix3d R_w_c0 = quaternionToRotation(cam_state.orientation);
+  const Eigen::Vector3d& t_c0_w = cam_state.position;
+  Eigen::Vector3d p_c0 = R_w_c0 * (in_p-t_c0_w);
+
+  out_p = cv::Point2f(p_c0(0)/p_c0(2), p_c0(1)/p_c0(2));
+  std::vector<cv::Point2f> out_v;
+  out_v.push_back(out_p);
+  std::vector<cv::Point2f> my_p = image_handler::distortPoints(out_v,
+                                                               intrinsics,
+                                                               distortion_model,
+                                                               distortion_coeffs);
+
+  // printf("truPosition: %f, %f, %f\n", position.x(), position.y(), position.z());
+  // printf("camPosition: %f, %f, %f\n", p_c0(0), p_c0(1), p_c0(2));
+  // printf("Photo projection: %f, %f\n", my_p[0].x, my_p[0].y);
+
+  return out_p;
+}
+
 bool Feature::projectPixelToPosition(cv::Point2f in_p,
     Eigen::Vector3d& out_p,
     const cv::Vec4d& intrinsics,
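A minimal, self-contained sketch of the core projection performed by projectPositionToCamera above (plain Eigen; the helper name is hypothetical and not part of the commit): the world point is rotated into the camera frame using the world-to-camera rotation and the camera position, then divided by depth.

    #include <Eigen/Dense>

    // Hypothetical helper mirroring the math above: R_w_c rotates world -> camera,
    // t_c_w is the camera position in the world frame, p_w the feature position.
    Eigen::Vector2d projectToNormalizedPlane(const Eigen::Matrix3d& R_w_c,
                                             const Eigen::Vector3d& t_c_w,
                                             const Eigen::Vector3d& p_w) {
      const Eigen::Vector3d p_c = R_w_c * (p_w - t_c_w);
      return Eigen::Vector2d(p_c.x() / p_c.z(), p_c.y() / p_c.z());
    }

Passing this normalized point through image_handler::distortPoints, as the commented-out debug code does, would give the corresponding pixel coordinates.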
@@ -342,9 +427,11 @@ bool Feature::projectPixelToPosition(cv::Point2f in_p,
   Eigen::Vector3d PositionInCamera(in_p.x/rho, in_p.y/rho, 1/rho);
   Eigen::Vector3d PositionInWorld = T_anchor_w.linear()*PositionInCamera + T_anchor_w.translation();
   anchorPatch_3d.push_back(PositionInWorld);
-  printf("%f, %f, %f\n",PositionInWorld[0], PositionInWorld[1], PositionInWorld[2]);
+  //printf("%f, %f, %f\n",PositionInWorld[0], PositionInWorld[1], PositionInWorld[2]);
 }
 
+
+//@test center projection must always be initial feature projection
 bool Feature::initializeAnchor(
     const movingWindow& cam0_moving_window,
     const cv::Vec4d& intrinsics,
@@ -352,7 +439,7 @@ bool Feature::initializeAnchor(
     const cv::Vec4d& distortion_coeffs)
 {
 
-  int N = 5;
+  int N = 3;
   int n = (int)(N-1)/2;
 
   auto anchor = observations.begin();
@@ -364,21 +451,22 @@ bool Feature::initializeAnchor(
   auto v = anchor->second(1)*intrinsics[1] + intrinsics[3];
   int count = 0;
 
-  printf("estimated NxN position: \n");
+  //go through surrounding pixels
   for(double u_run = u - n; u_run <= u + n; u_run = u_run + 1)
   {
     for(double v_run = v - n; v_run <= v + n; v_run = v_run + 1)
     {
       anchorPatch.push_back(anchorImage.at<uint8_t>((int)u_run,(int)v_run));
       Eigen::Vector3d Npose;
-      projectPixelToPosition(cv::Point2f((u_run-intrinsics[2])/intrinsics[0], (v_run-intrinsics[1])/intrinsics[3]),
+      projectPixelToPosition(cv::Point2f((u_run-intrinsics[2])/intrinsics[0], (v_run-intrinsics[3])/intrinsics[1]),
                              Npose,
                              intrinsics,
                              distortion_model,
                              distortion_coeffs);
     }
   }
-  return true;
+
+  return true;
 }
 
 bool Feature::initializePosition(
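The changed projectPixelToPosition call follows from the pinhole relation used above to compute u and v, assuming the intrinsics vector is ordered [f_x, f_y, c_x, c_y]:

    u = f_x * x + c_x,  v = f_y * y + c_y
    =>  x = (u - c_x) / f_x = (u_run - intrinsics[2]) / intrinsics[0]
        y = (v - c_y) / f_y = (v_run - intrinsics[3]) / intrinsics[1]

so the v_run term now divides by the focal length rather than by the principal point.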
@@ -22,6 +22,7 @@
 #include <ros/ros.h>
 #include <sensor_msgs/Imu.h>
 #include <sensor_msgs/Image.h>
+#include <sensor_msgs/PointCloud.h>
 #include <nav_msgs/Odometry.h>
 #include <std_msgs/Float64.h>
 #include <tf/transform_broadcaster.h>
@@ -174,6 +175,20 @@ class MsckfVio {
     void featureJacobian(const FeatureIDType& feature_id,
         const std::vector<StateIDType>& cam_state_ids,
         Eigen::MatrixXd& H_x, Eigen::VectorXd& r);
+
+    void PhotometricMeasurementJacobian(
+        const StateIDType& cam_state_id,
+        const FeatureIDType& feature_id,
+        Eigen::Matrix<double, 4, 6>& H_x,
+        Eigen::Matrix<double, 4, 3>& H_f,
+        Eigen::Vector4d& r);
+
+    void PhotometricFeatureJacobian(
+        const FeatureIDType& feature_id,
+        const std::vector<StateIDType>& cam_state_ids,
+        Eigen::MatrixXd& H_x, Eigen::VectorXd& r);
+
     void measurementUpdate(const Eigen::MatrixXd& H,
         const Eigen::VectorXd& r);
     bool gatingTest(const Eigen::MatrixXd& H,
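The new declarations keep the block sizes of the existing geometric update: per camera state, H_x is 4x6 and H_f is 4x3 (four rows for the stereo observation, six error-state columns for the camera pose). How PhotometricFeatureJacobian is set up to combine them, following the same pattern as featureJacobian (my summary, not text from the commit): the per-state blocks are stacked row-wise, and the stacked system is projected onto the left null space of H_f,

    H_o = A^T * H_x,   r_o = A^T * r,   columns of A spanning the left null space of H_f,

so the feature position error is marginalized out before the EKF update.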
@@ -622,6 +622,7 @@ void ImageProcessor::stereoMatch(
     image_handler::undistortPoints(cam0_points, cam0_intrinsics, cam0_distortion_model,
                                    cam0_distortion_coeffs, cam0_points_undistorted,
                                    R_cam0_cam1);
+
     cam1_points = image_handler::distortPoints(cam0_points_undistorted, cam1_intrinsics,
                                                cam1_distortion_model, cam1_distortion_coeffs);
   }
@@ -443,12 +443,14 @@ void MsckfVio::initializeGravityAndBias() {
   // is consistent with the inertial frame.
   double gravity_norm = gravity_imu.norm();
   IMUState::gravity = Vector3d(0.0, 0.0, -gravity_norm);
 
   Quaterniond q0_i_w = Quaterniond::FromTwoVectors(
     gravity_imu, -IMUState::gravity);
   state_server.imu_state.orientation =
     rotationToQuaternion(q0_i_w.toRotationMatrix().transpose());
-
+  // printf("gravity Norm %f\n", gravity_norm);
+  // printf("linear_acc %f, %f, %f\n", gravity_imu(0), gravity_imu(1), gravity_imu(2));
+  // printf("quaterniond: %f, %f, %f, %f\n", q0_i_w.w(), q0_i_w.x(), q0_i_w.y(), q0_i_w.z());
   return;
 }
 
@@ -803,6 +805,7 @@ void MsckfVio::stateAugmentation(const double& time) {
   cam_state.orientation_null = cam_state.orientation;
   cam_state.position_null = cam_state.position;
 
+
   // Update the covariance matrix of the state.
   // To simplify computation, the matrix J below is the nontrivial block
   // in Equation (16) in "A Multi-State Constraint Kalman Filter for Vision
@@ -874,6 +877,157 @@ void MsckfVio::addFeatureObservations(
   return;
 }
 
+void MsckfVio::PhotometricMeasurementJacobian(
+    const StateIDType& cam_state_id,
+    const FeatureIDType& feature_id,
+    Matrix<double, 4, 6>& H_x, Matrix<double, 4, 3>& H_f, Vector4d& r) {
+
+  // Prepare all the required data.
+  const CAMState& cam_state = state_server.cam_states[cam_state_id];
+  const Feature& feature = map_server[feature_id];
+
+  // Cam0 pose.
+  Matrix3d R_w_c0 = quaternionToRotation(cam_state.orientation);
+  const Vector3d& t_c0_w = cam_state.position;
+
+  // Cam1 pose.
+  Matrix3d R_c0_c1 = CAMState::T_cam0_cam1.linear();
+  Matrix3d R_w_c1 = CAMState::T_cam0_cam1.linear() * R_w_c0;
+  Vector3d t_c1_w = t_c0_w - R_w_c1.transpose()*CAMState::T_cam0_cam1.translation();
+
+  // 3d feature position in the world frame.
+  // And its observation with the stereo cameras.
+  const Vector3d& p_w = feature.position;
+
+  // observation
+  const Vector4d& z = feature.observations.find(cam_state_id)->second;
+
+  // photometric observation
+  std::vector<uint8_t> photo_z;
+  feature.IrradianceOfAnchorPatch(cam_state, cam_state_id, cam0_intrinsics, cam0_distortion_model, cam0_distortion_coeffs, cam0_moving_window, photo_z);
+
+
+  // Convert the feature position from the world frame to
+  // the cam0 and cam1 frame.
+  Vector3d p_c0 = R_w_c0 * (p_w-t_c0_w);
+  Vector3d p_c1 = R_w_c1 * (p_w-t_c1_w);
+
+
+  // compute the resulting estimated position in the image
+  cv::Point2f out_p = cv::Point2f(p_c0(0)/p_c0(2), p_c0(1)/p_c0(2));
+  std::vector<cv::Point2f> out_v;
+  out_v.push_back(out_p);
+
+  /*
+  // prints the feature projection in pixel space
+  std::vector<cv::Point2f> my_p = image_handler::distortPoints(out_v,
+                                                               cam0_intrinsics,
+                                                               cam0_distortion_model,
+                                                               cam0_distortion_coeffs);
+  printf("projection: %f, %f\n", my_p[0].x, my_p[0].y);
+  */
+
+  // Compute the Jacobians.
+  Matrix<double, 4, 3> dz_dpc0 = Matrix<double, 4, 3>::Zero();
+  dz_dpc0(0, 0) = 1 / p_c0(2);
+  dz_dpc0(1, 1) = 1 / p_c0(2);
+  dz_dpc0(0, 2) = -p_c0(0) / (p_c0(2)*p_c0(2));
+  dz_dpc0(1, 2) = -p_c0(1) / (p_c0(2)*p_c0(2));
+
+  Matrix<double, 4, 3> dz_dpc1 = Matrix<double, 4, 3>::Zero();
+  dz_dpc1(2, 0) = 1 / p_c1(2);
+  dz_dpc1(3, 1) = 1 / p_c1(2);
+  dz_dpc1(2, 2) = -p_c1(0) / (p_c1(2)*p_c1(2));
+  dz_dpc1(3, 2) = -p_c1(1) / (p_c1(2)*p_c1(2));
+
+  Matrix<double, 3, 6> dpc0_dxc = Matrix<double, 3, 6>::Zero();
+  dpc0_dxc.leftCols(3) = skewSymmetric(p_c0);
+  dpc0_dxc.rightCols(3) = -R_w_c0;
+
+  Matrix<double, 3, 6> dpc1_dxc = Matrix<double, 3, 6>::Zero();
+  dpc1_dxc.leftCols(3) = R_c0_c1 * skewSymmetric(p_c0);
+  dpc1_dxc.rightCols(3) = -R_w_c1;
+
+  Matrix3d dpc0_dpg = R_w_c0;
+  Matrix3d dpc1_dpg = R_w_c1;
+
+  H_x = dz_dpc0*dpc0_dxc + dz_dpc1*dpc1_dxc;
+  H_f = dz_dpc0*dpc0_dpg + dz_dpc1*dpc1_dpg;
+
+  // Modify the measurement Jacobian to ensure
+  // the observability constraint.
+  Matrix<double, 4, 6> A = H_x;
+  Matrix<double, 6, 1> u = Matrix<double, 6, 1>::Zero();
+  u.block<3, 1>(0, 0) = quaternionToRotation(
+      cam_state.orientation_null) * IMUState::gravity;
+  u.block<3, 1>(3, 0) = skewSymmetric(
+      p_w-cam_state.position_null) * IMUState::gravity;
+  H_x = A - A*u*(u.transpose()*u).inverse()*u.transpose();
+  H_f = -H_x.block<4, 3>(0, 3);
+
+  // Compute the residual.
+  r = z - Vector4d(p_c0(0)/p_c0(2), p_c0(1)/p_c0(2),
+      p_c1(0)/p_c1(2), p_c1(1)/p_c1(2));
+
+  return;
+}
+
+void MsckfVio::PhotometricFeatureJacobian(
+    const FeatureIDType& feature_id,
+    const std::vector<StateIDType>& cam_state_ids,
+    MatrixXd& H_x, VectorXd& r) {
+
+  const auto& feature = map_server[feature_id];
+
+  // Check how many of the provided camera states
+  // have actually observed this feature.
+  vector<StateIDType> valid_cam_state_ids(0);
+  for (const auto& cam_id : cam_state_ids) {
+    if (feature.observations.find(cam_id) ==
+        feature.observations.end()) continue;
+
+    valid_cam_state_ids.push_back(cam_id);
+  }
+
+  int jacobian_row_size = 0;
+  jacobian_row_size = 4 * valid_cam_state_ids.size();
+
+  MatrixXd H_xj = MatrixXd::Zero(jacobian_row_size,
+      21+state_server.cam_states.size()*6);
+  MatrixXd H_fj = MatrixXd::Zero(jacobian_row_size, 3);
+  VectorXd r_j = VectorXd::Zero(jacobian_row_size);
+  int stack_cntr = 0;
+
+  for (const auto& cam_id : valid_cam_state_ids) {
+
+    Matrix<double, 4, 6> H_xi = Matrix<double, 4, 6>::Zero();
+    Matrix<double, 4, 3> H_fi = Matrix<double, 4, 3>::Zero();
+    Vector4d r_i = Vector4d::Zero();
+    PhotometricMeasurementJacobian(cam_id, feature.id, H_xi, H_fi, r_i);
+
+    auto cam_state_iter = state_server.cam_states.find(cam_id);
+    int cam_state_cntr = std::distance(
+        state_server.cam_states.begin(), cam_state_iter);
+
+    // Stack the Jacobians.
+    H_xj.block<4, 6>(stack_cntr, 21+6*cam_state_cntr) = H_xi;
+    H_fj.block<4, 3>(stack_cntr, 0) = H_fi;
+    r_j.segment<4>(stack_cntr) = r_i;
+    stack_cntr += 4;
+  }
+
+  // Project the residual and Jacobians onto the nullspace
+  // of H_fj.
+  JacobiSVD<MatrixXd> svd_helper(H_fj, ComputeFullU | ComputeThinV);
+  MatrixXd A = svd_helper.matrixU().rightCols(
+      jacobian_row_size - 3);
+
+  H_x = A.transpose() * H_xj;
+  r = A.transpose() * r_j;
+
+  return;
+}
+
 void MsckfVio::measurementJacobian(
     const StateIDType& cam_state_id,
     const FeatureIDType& feature_id,
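The dz_dpc0 and dz_dpc1 entries above are the standard Jacobian of the normalized pinhole projection z = (X/Z, Y/Z) with respect to the camera-frame point p_c = (X, Y, Z):

    dz/dp_c = [ 1/Z    0    -X/Z^2 ]
              [  0    1/Z   -Y/Z^2 ]

with rows 0-1 filled for cam0 and rows 2-3 for cam1; H_x and H_f then follow by the chain rule through dpc0_dxc, dpc1_dxc and dpc0_dpg, dpc1_dpg. Note that, as written, the photometric samples in photo_z are gathered but the residual r is still the geometric reprojection error.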
@@ -1199,7 +1353,7 @@ void MsckfVio::removeLostFeatures() {
 
     MatrixXd H_xj;
     VectorXd r_j;
-    featureJacobian(feature.id, cam_state_ids, H_xj, r_j);
+    PhotometricFeatureJacobian(feature.id, cam_state_ids, H_xj, r_j);
 
     if (gatingTest(H_xj, r_j, cam_state_ids.size()-1)) {
       H_x.block(stack_cntr, 0, H_xj.rows(), H_xj.cols()) = H_xj;
@@ -1316,6 +1470,13 @@ void MsckfVio::pruneCamStateBuffer() {
       }
     }
 
+    if(!feature.initializeAnchor(cam0_moving_window, cam0_intrinsics, cam0_distortion_model, cam0_distortion_coeffs))
+    {
+      for (const auto& cam_id : involved_cam_state_ids)
+        feature.observations.erase(cam_id);
+      continue;
+    }
+
     jacobian_row_size += 4*involved_cam_state_ids.size() - 3;
   }
 
@@ -1342,7 +1503,7 @@ void MsckfVio::pruneCamStateBuffer() {
 
     MatrixXd H_xj;
     VectorXd r_j;
-    featureJacobian(feature.id, involved_cam_state_ids, H_xj, r_j);
+    PhotometricFeatureJacobian(feature.id, involved_cam_state_ids, H_xj, r_j);
 
     if (gatingTest(H_xj, r_j, involved_cam_state_ids.size())) {
       H_x.block(stack_cntr, 0, H_xj.rows(), H_xj.cols()) = H_xj;