changed structure for image undistort into image_handler

Raphael Maenle 2019-06-27 15:57:24 +02:00
parent ebc73c0c5e
commit b3e86b3e64
6 changed files with 128 additions and 186 deletions
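In short, the per-call-site cv::fisheye::undistortImage usage (each site building its own camera matrix K from the intrinsics) is folded into a single image_handler::undistortImage helper that dispatches on the distortion model string. A minimal sketch of a call site after the change; cam0 and img are stand-ins for the calibration struct and cv::Mat used in the surrounding code:

// before: build K by hand and undistort with cv::fisheye directly
// const cv::Matx33d K0(cam0.intrinsics[0], 0.0, cam0.intrinsics[2],
//                      0.0, cam0.intrinsics[1], cam0.intrinsics[3],
//                      0.0, 0.0, 1.0);
// cv::fisheye::undistortImage(img, img, K0, cam0.distortion_coeffs, K0);

// after: image_handler owns the model-specific logic
image_handler::undistortImage(img, img,
                              cam0.distortion_model,
                              cam0.intrinsics,
                              cam0.distortion_coeffs);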

View File

@@ -138,8 +138,6 @@ void Rhocost(const Eigen::Isometry3d& T_c0_ci,
       const CamStateServer& cam_states) const;
 
-  bool initializeAnchorUndistort(const CameraCalibration& cam, int N);
-
   /*
    * @brief InitializeAnchor generates the NxN patch around the
    * feature in the Anchor image
@@ -174,13 +172,6 @@ void Rhocost(const Eigen::Isometry3d& T_c0_ci,
       Eigen::Vector3d& in_p) const;
 
-  cv::Point2f projectPositionToCameraUndistorted(
-      const CAMState& cam_state,
-      const StateIDType& cam_state_id,
-      const CameraCalibration& cam,
-      Eigen::Vector3d& in_p) const;
-
   /*
    * @brief project PositionToCamera Takes a 3d position in a world frame
    * and projects it into the passed camera frame using pinhole projection
@@ -757,7 +748,7 @@ bool Feature::VisualizePatch(
   std::vector<double> projectionPatch;
   for(auto point : anchorPatch_3d)
   {
-    cv::Point2f p_in_c0 = projectPositionToCameraUndistorted(cam_state, cam_state_id, cam0, point);
+    cv::Point2f p_in_c0 = projectPositionToCamera(cam_state, cam_state_id, cam0, point);
     projectionPatch.push_back(PixelIrradiance(p_in_c0, current_image));
     // visu - feature
     cv::Point xs(p_in_c0.x, p_in_c0.y);
@@ -964,35 +955,6 @@ cv::Point2f Feature::pixelDistanceAt(
   return distance;
 }
 
-cv::Point2f Feature::projectPositionToCameraUndistorted(
-    const CAMState& cam_state,
-    const StateIDType& cam_state_id,
-    const CameraCalibration& cam,
-    Eigen::Vector3d& in_p) const
-{
-  Eigen::Isometry3d T_c0_w;
-
-  cv::Point2f out_p;
-
-  // transfrom position to camera frame
-  Eigen::Matrix3d R_w_c0 = quaternionToRotation(cam_state.orientation);
-  const Eigen::Vector3d& t_c0_w = cam_state.position;
-  Eigen::Vector3d p_c0 = R_w_c0 * (in_p-t_c0_w);
-
-  out_p = cv::Point2f(p_c0(0)/p_c0(2), p_c0(1)/p_c0(2));
-
-  // if(cam_state_id == observations.begin()->first)
-  //printf("undist:\n \tproj pos: %f, %f\n\ttrue pos: %f, %f\n", out_p.x, out_p.y, undist_anchor_center_pos.x, undist_anchor_center_pos.y);
-
-  cv::Point2f my_p(out_p.x * cam.intrinsics[0] + cam.intrinsics[2], out_p.y * cam.intrinsics[1] + cam.intrinsics[3]);
-
-  // printf("truPosition: %f, %f, %f\n", position.x(), position.y(), position.z());
-  // printf("camPosition: %f, %f, %f\n", p_c0(0), p_c0(1), p_c0(2));
-  // printf("Photo projection: %f, %f\n", my_p[0].x, my_p[0].y);
-
-  return my_p;
-}
-
 cv::Point2f Feature::projectPositionToCamera(
     const CAMState& cam_state,
     const StateIDType& cam_state_id,
@@ -1009,11 +971,13 @@ cv::Point2f Feature::projectPositionToCamera(
   Eigen::Vector3d p_c0 = R_w_c0 * (in_p-t_c0_w);
 
   out_p = cv::Point2f(p_c0(0)/p_c0(2), p_c0(1)/p_c0(2));
 
-  // if(cam_state_id == observations.begin()->first)
-  //printf("undist:\n \tproj pos: %f, %f\n\ttrue pos: %f, %f\n", out_p.x, out_p.y, undist_anchor_center_pos.x, undist_anchor_center_pos.y);
-
-  cv::Point2f my_p = image_handler::distortPoint(out_p,
+  cv::Point2f my_p;
+
+  // test is prewarped or not
+  if (cam.distortion_model.substr(0,3) == "pre-")
+    my_p = cv::Point2f(out_p.x * cam.intrinsics[0] + cam.intrinsics[2], out_p.y * cam.intrinsics[1] + cam.intrinsics[3]);
+  else
+    my_p = image_handler::distortPoint(out_p,
                                        cam.intrinsics,
                                        cam.distortion_model,
                                        cam.distortion_coeffs);
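The new branch skips image_handler::distortPoint whenever the distortion model name carries the "pre-" prefix, meaning the moving-window images were already warped to an undistorted pinhole view, so projecting a 3D point only needs the camera matrix. That multiplication matches what the pinholeDownProject helper declared in image_handler presumably does; its body is not part of this diff, so the following is an assumed sketch mirroring the pinholeUpProject shown further down:

#include <opencv2/core.hpp>

// assumed counterpart of pinholeUpProject: normalized coordinates -> pixel coordinates
cv::Point2f pinholeDownProject(const cv::Point2f& p_in, const cv::Vec4d& intrinsics) {
  // u = fx * x + cx,  v = fy * y + cy
  return cv::Point2f(p_in.x * intrinsics[0] + intrinsics[2],
                     p_in.y * intrinsics[1] + intrinsics[3]);
}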
@@ -1068,12 +1032,26 @@ bool Feature::initializeAnchor(const CameraCalibration& cam, int N)
   auto u = anchor->second(0);//*cam.intrinsics[0] + cam.intrinsics[2];
   auto v = anchor->second(1);//*cam.intrinsics[1] + cam.intrinsics[3];
 
-  //testing
-  undist_anchor_center_pos = cv::Point2f(u,v);
-
-  //for NxN patch pixels around feature
-  int count = 0;
+  // check if image has been pre-undistorted
+  if(cam.distortion_model.substr(0,3) == "pre-")
+  {
+    std::cout << "is a pre" << std::endl;
+
+    //project onto pixel plane
+    undist_anchor_center_pos = cv::Point2f(u * cam.intrinsics[0] + cam.intrinsics[2], v * cam.intrinsics[1] + cam.intrinsics[3]);
+
+    // create vector of patch in pixel plane
+    for(double u_run = -n; u_run <= n; u_run++)
+      for(double v_run = -n; v_run <= n; v_run++)
+        anchorPatch_real.push_back(cv::Point2f(undist_anchor_center_pos.x+u_run, undist_anchor_center_pos.y+v_run));
+
+    //project back into u,v
+    for(int i = 0; i < N*N; i++)
+      anchorPatch_ideal.push_back(cv::Point2f((anchorPatch_real[i].x-cam.intrinsics[2])/cam.intrinsics[0], (anchorPatch_real[i].y-cam.intrinsics[3])/cam.intrinsics[1]));
+  }
+  else
+  {
   // get feature in undistorted pixel space
   // this only reverts from 'pure' space into undistorted pixel space using camera matrix
   cv::Point2f und_pix_p = image_handler::distortPoint(cv::Point2f(u, v),
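With a pre-undistorted anchor image the patch is built directly in pixel space: for patch_size_n = 5 (so n = 2) the nested loops collect the 25 pixels of a 5x5 grid centred on the projected anchor, and the second pass maps each pixel back through the inverted camera matrix into the normalized coordinates stored in anchorPatch_ideal. A self-contained sketch of that construction with placeholder intrinsics and feature position:

#include <opencv2/core.hpp>
#include <vector>
#include <cstdio>

int main() {
  const int N = 5, n = (N - 1) / 2;
  cv::Vec4d intr(460.0, 460.0, 320.0, 240.0);           // fx, fy, cx, cy (placeholders)
  cv::Point2f center(0.10f * intr[0] + intr[2],          // anchor feature projected to pixels
                     -0.05f * intr[1] + intr[3]);

  std::vector<cv::Point2f> real, ideal;
  for (double du = -n; du <= n; du++)
    for (double dv = -n; dv <= n; dv++)
      real.push_back(cv::Point2f(center.x + du, center.y + dv));

  // invert the camera matrix to get the "ideal" (normalized) patch coordinates
  for (const auto& p : real)
    ideal.push_back(cv::Point2f((p.x - intr[2]) / intr[0], (p.y - intr[3]) / intr[1]));

  std::printf("patch size: %zu, center pixel: (%.1f, %.1f)\n", real.size(), center.x, center.y);
  return 0;
}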
@@ -1093,80 +1071,8 @@ bool Feature::initializeAnchor(const CameraCalibration& cam, int N)
                                        cam.distortion_coeffs,
                                        anchorPatch_ideal);
+  }
 
-  // save anchor position for later visualisaztion
-  anchor_center_pos = anchorPatch_real[(N*N-1)/2];
-
-  // save true pixel Patch position
-  for(auto point : anchorPatch_real)
-    if(point.x - n < 0 || point.x + n >= cam.resolution(0) || point.y - n < 0 || point.y + n >= cam.resolution(1))
-      return false;
-
-  for(auto point : anchorPatch_real)
-    anchorPatch.push_back(PixelIrradiance(point, anchorImage));
-
-  // project patch pixel to 3D space in camera coordinate system
-  for(auto point : anchorPatch_ideal)
-    anchorPatch_3d.push_back(AnchorPixelToPosition(point, cam));
-
-  is_anchored = true;
-  return true;
-}
-
-//@test center projection must always be initial feature projection
-bool Feature::initializeAnchorUndistort(const CameraCalibration& cam, int N)
-{
-  //initialize patch Size
-  int n = (int)(N-1)/2;
-
-  auto anchor = observations.begin();
-  if(cam.moving_window.find(anchor->first) == cam.moving_window.end())
-    return false;
-
-  cv::Mat anchorImage = cam.moving_window.find(anchor->first)->second.image;
-  cv::Mat anchorImage_deeper;
-  anchorImage.convertTo(anchorImage_deeper,CV_16S);
-
-  //TODO remove this?
-  cv::Mat hen = cv::Mat::zeros(cv::Size(3, 3), CV_16S);
-  hen.at<int>(1,0) = 1;
-
-  //sobel test
-  /*
-  cv::Mat newhen;
-  cv::Sobel(hen, newhen, -1, 1, 0, 3);
-  std::cout << "newhen" << newhen << std::endl;
-  */
-
-  cv::Sobel(anchorImage_deeper, xderImage, -1, 1, 0, 3);
-  cv::Sobel(anchorImage_deeper, yderImage, -1, 0, 1, 3);
-  xderImage/=4;
-  yderImage/=4;
-
-  cv::convertScaleAbs(xderImage, abs_xderImage);
-  cv::convertScaleAbs(yderImage, abs_yderImage);
-
-  cvWaitKey(0);
-
-  auto u = anchor->second(0);//*cam.intrinsics[0] + cam.intrinsics[2];
-  auto v = anchor->second(1);//*cam.intrinsics[1] + cam.intrinsics[3];
-
-  //project onto pixel plane
-  undist_anchor_center_pos = cv::Point2f(u * cam.intrinsics[0] + cam.intrinsics[2], v * cam.intrinsics[1] + cam.intrinsics[3]);
-
-  // create vector of patch in pixel plane
-  for(double u_run = -n; u_run <= n; u_run++)
-    for(double v_run = -n; v_run <= n; v_run++)
-      anchorPatch_real.push_back(cv::Point2f(undist_anchor_center_pos.x+u_run, undist_anchor_center_pos.y+v_run));
-
-  //project back into u,v
-  for(int i = 0; i < N*N; i++)
-    anchorPatch_ideal.push_back(cv::Point2f((anchorPatch_real[i].x-cam.intrinsics[2])/cam.intrinsics[0], (anchorPatch_real[i].y-cam.intrinsics[3])/cam.intrinsics[1]));
-
   // save anchor position for later visualisaztion
   anchor_center_pos = anchorPatch_real[(N*N-1)/2];
@@ -1183,6 +1089,7 @@ bool Feature::initializeAnchorUndistort(const CameraCalibration& cam, int N)
     anchorPatch_3d.push_back(AnchorPixelToPosition(point, cam));
 
   is_anchored = true;
   return true;
 }

View File

@@ -19,6 +19,13 @@ namespace image_handler {
 cv::Point2f pinholeDownProject(const cv::Point2f& p_in, const cv::Vec4d& intrinsics);
 cv::Point2f pinholeUpProject(const cv::Point2f& p_in, const cv::Vec4d& intrinsics);
 
+void undistortImage(
+    cv::InputArray src,
+    cv::OutputArray dst,
+    const std::string& distortion_model,
+    const cv::Vec4d& intrinsics,
+    const cv::Vec4d& distortion_coeffs);
+
 void undistortPoints(
     const std::vector<cv::Point2f>& pts_in,
     const cv::Vec4d& intrinsics,

View File

@@ -18,11 +18,11 @@
       output="screen">
 
       <!-- Photometry Flag-->
-      <param name="PHOTOMETRIC" value="true"/>
+      <param name="PHOTOMETRIC" value="false"/>
 
       <!-- Debugging Flaggs -->
-      <param name="StreamPause" value="false"/>
-      <param name="PrintImages" value="false"/>
+      <param name="StreamPause" value="true"/>
+      <param name="PrintImages" value="true"/>
       <param name="GroundTruth" value="false"/>
 
       <param name="patch_size_n" value="5"/>

View File

@@ -24,6 +24,24 @@ cv::Point2f pinholeUpProject(const cv::Point2f& p_in, const cv::Vec4d& intrinsic
   return cv::Point2f((p_in.x - intrinsics[2])/intrinsics[0], (p_in.y - intrinsics[3])/intrinsics[1]);
 }
 
+void undistortImage(
+    cv::InputArray src,
+    cv::OutputArray dst,
+    const std::string& distortion_model,
+    const cv::Vec4d& intrinsics,
+    const cv::Vec4d& distortion_coeffs)
+{
+  const cv::Matx33d K(intrinsics[0], 0.0, intrinsics[2],
+                      0.0, intrinsics[1], intrinsics[3],
+                      0.0, 0.0, 1.0);
+
+  if (distortion_model == "pre-equidistant")
+    cv::fisheye::undistortImage(src, dst, K, distortion_coeffs, K);
+  else if (distortion_model == "equidistant")
+    src.copyTo(dst);
+}
+
 void undistortPoint(
     const cv::Point2f& pt_in,
     const cv::Vec4d& intrinsics,
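For reference, a minimal standalone use of the new helper; the header path, file names, and calibration values below are placeholders, and "pre-equidistant" appears to mark images that get warped once here so that downstream per-point code can skip distortPoint:

#include <opencv2/opencv.hpp>
#include "msckf_vio/image_handler.h"   // assumed header path

int main() {
  cv::Mat raw = cv::imread("frame.png", cv::IMREAD_GRAYSCALE);   // placeholder input image
  cv::Mat undistorted;
  cv::Vec4d intrinsics(458.654, 457.296, 367.215, 248.375);      // fx, fy, cx, cy (example values)
  cv::Vec4d dist_coeffs(-0.01, 0.02, -0.003, 0.0005);            // placeholder equidistant coefficients

  // "pre-equidistant" triggers the actual cv::fisheye::undistortImage call
  image_handler::undistortImage(raw, undistorted, "pre-equidistant", intrinsics, dist_coeffs);
  cv::imwrite("frame_undistorted.png", undistorted);
  return 0;
}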

View File

@@ -215,8 +215,8 @@ void ImageProcessor::stereoCallback(
     const sensor_msgs::ImageConstPtr& cam1_img) {
 
   // stop playing bagfile if printing images
-  if(STREAMPAUSE)
-    nh.setParam("/play_bag_image", false);
+  //if(STREAMPAUSE)
+  //  nh.setParam("/play_bag_image", false);
 
   // Get the current image.
   cam0_curr_img_ptr = cv_bridge::toCvShare(cam0_img,
@@ -225,16 +225,12 @@ void ImageProcessor::stereoCallback(
     sensor_msgs::image_encodings::MONO8);
 
-  const cv::Matx33d K0(cam0.intrinsics[0], 0.0, cam0.intrinsics[2],
-                       0.0, cam0.intrinsics[1], cam0.intrinsics[3],
-                       0.0, 0.0, 1.0);
-  const cv::Matx33d K1(cam1.intrinsics[0], 0.0, cam1.intrinsics[2],
-                       0.0, cam1.intrinsics[1], cam1.intrinsics[3],
-                       0.0, 0.0, 1.0);
-
-  cv::fisheye::undistortImage(cam0_curr_img_ptr->image, cam0_curr_img_ptr->image, K0, cam0.distortion_coeffs, K0);
-  cv::fisheye::undistortImage(cam1_curr_img_ptr->image, cam1_curr_img_ptr->image, K1, cam1.distortion_coeffs, K1);
+  cv::Mat undistortedCam0;
+  cv::Mat undistortedCam1;
+
+  //image_handler::undistortImage(cam0_curr_img_ptr->image, cam0_curr_img_ptr->image, cam0.distortion_model, cam0.intrinsics, cam0.distortion_coeffs);
+  //image_handler::undistortImage(cam1_curr_img_ptr->image, cam1_curr_img_ptr->image, cam1.distortion_model, cam1.intrinsics, cam1.distortion_coeffs);
 
   // Build the image pyramids once since they're used at multiple places
   createImagePyramids();
@@ -306,8 +302,8 @@ void ImageProcessor::stereoCallback(
   }
 
   // stop playing bagfile if printing images
-  if(STREAMPAUSE)
-    nh.setParam("/play_bag_image", true);
+  //if(STREAMPAUSE)
+  //  nh.setParam("/play_bag_image", true);
 
   return;
 }

View File

@@ -450,7 +450,7 @@ void MsckfVio::imageCallback(
   double imu_processing_time = (
       ros::Time::now()-start_time).toSec();
 
-  //cout << "1" << endl;
+  cout << "1" << endl;
   // Augment the state vector.
   start_time = ros::Time::now();
   //truePhotometricStateAugmentation(feature_msg->header.stamp.toSec());
@@ -555,16 +555,14 @@ void MsckfVio::manageMovingWindow(
   // undistort Images
-  const cv::Matx33d K(cam0.intrinsics[0], 0.0, cam0.intrinsics[2],
-                      0.0, cam0.intrinsics[1], cam0.intrinsics[3],
-                      0.0, 0.0, 1.0);
-
-  cv::Mat undistortedCam0;
-  cv::fisheye::undistortImage(cam0.moving_window[state_server.imu_state.id].image, undistortedCam0, K, cam0.distortion_coeffs, K);
-  //cv::imshow("anchor", undistortedCam0);
-  //cvWaitKey(0);
-  cam0.moving_window[state_server.imu_state.id].image = undistortedCam0.clone();
-  cam1.moving_window[state_server.imu_state.id].image = cam1_img_ptr->image.clone();
+  //cv::Mat undistortedCam0;
+  //cv::Mat undistortedCam1;
+
+  //image_handler::undistortImage(cam0.moving_window[state_server.imu_state.id].image, undistortedCam0, cam0.distortion_model, cam0.intrinsics, cam0.distortion_coeffs);
+  //image_handler::undistortImage(cam1.moving_window[state_server.imu_state.id].image, undistortedCam1, cam1.distortion_model, cam1.intrinsics, cam1.distortion_coeffs);
+
+  //cam0.moving_window[state_server.imu_state.id].image = undistortedCam0.clone();
+  //cam1.moving_window[state_server.imu_state.id].image = undistortedCam1.clone();
 
   //TODO handle any massive overflow correctly (should be pruned, before this ever triggers)
   while(cam0.moving_window.size() > 100)
@ -1322,8 +1320,8 @@ bool MsckfVio::PhotometricMeasurementJacobian(
//cout << "____feature-measurement_____\n" << endl; //cout << "____feature-measurement_____\n" << endl;
Eigen::Vector3d p_c0 = R_w_c0 * (point-t_c0_w); Eigen::Vector3d p_c0 = R_w_c0 * (point-t_c0_w);
cv::Point2f p_in_c0 = feature.projectPositionToCameraUndistorted(cam_state, cam_state_id, cam0, point); cv::Point2f p_in_c0 = feature.projectPositionToCamera(cam_state, cam_state_id, cam0, point);
cv::Point2f p_in_anchor = feature.projectPositionToCameraUndistorted(anchor_state, anchor_state_id, cam0, point); cv::Point2f p_in_anchor = feature.projectPositionToCamera(anchor_state, anchor_state_id, cam0, point);
if( p_in_c0.x < 0 || p_in_c0.x >= cam0.resolution(0) || p_in_c0.y < 0 || p_in_c0.y >= cam0.resolution(1) ) if( p_in_c0.x < 0 || p_in_c0.x >= cam0.resolution(0) || p_in_c0.y < 0 || p_in_c0.y >= cam0.resolution(1) )
{ {
@@ -1509,7 +1507,7 @@ void MsckfVio::PhotometricFeatureJacobian(
   H_x = A_null_space.transpose() * H_xi;
   r = A_null_space.transpose() * r_i;
 
-  //cout << "\nx\n" << H_x.colPivHouseholderQr().solve(r) << endl;
+  Eigen::MatrixXd xres = H_x.colPivHouseholderQr().solve(r);
 
   if(PRINTIMAGES)
   {
@@ -1556,6 +1554,13 @@ void MsckfVio::PhotometricFeatureJacobian(
            << "# columns: " << 1 << "\n"
            << r_i << endl;
 
+    myfile << "# name: xres\n"
+           << "# type: matrix\n"
+           << "# rows: " << xres.rows() << "\n"
+           << "# columns: " << xres.cols() << "\n"
+           << xres << endl;
+
     myfile.close();
     cout << "---------- LOGGED -------- " << endl;
     cvWaitKey(0);
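The newly logged xres is the least-squares solution of the stacked linear system H_x * x = r computed with a column-pivoting QR, presumably kept as a debugging check on the state correction implied by the residual before gating and the Kalman update. A toy Eigen example of the same call, with an invented overdetermined system standing in for the stacked Jacobian and residual:

#include <Eigen/Dense>
#include <iostream>

int main() {
  Eigen::MatrixXd H_x(4, 2);            // stand-in for the stacked measurement Jacobian
  H_x << 1, 0,
         0, 1,
         1, 1,
         2, 1;
  Eigen::VectorXd r(4);                 // stand-in for the stacked residual
  r << 1.0, 2.0, 2.9, 4.1;

  // least-squares solution of H_x * x = r
  Eigen::MatrixXd xres = H_x.colPivHouseholderQr().solve(r);
  std::cout << "xres =\n" << xres << std::endl;
  return 0;
}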
@@ -1696,6 +1701,7 @@ void MsckfVio::featureJacobian(
   H_x = A.transpose() * H_xj;
   r = A.transpose() * r_j;
 
+  Eigen::MatrixXd xres = H_x.colPivHouseholderQr().solve(r);
   // stop playing bagfile if printing images
   if(PRINTIMAGES)
   {
@@ -1715,13 +1721,19 @@ void MsckfVio::featureJacobian(
            << "# columns: " << H_fj.cols() << "\n"
            << H_fj << endl;
 
     myfile << "# name: r\n"
            << "# type: matrix\n"
            << "# rows: " << r_j.rows() << "\n"
            << "# columns: " << 1 << "\n"
            << r_j << endl;
 
+    myfile << "# name: xres\n"
+           << "# type: matrix\n"
+           << "# rows: " << xres.rows() << "\n"
+           << "# columns: " << 1 << "\n"
+           << xres << endl;
+
     myfile.close();
     cout << "---------- LOGGED ORG -------- " << endl;
   }
@@ -2002,7 +2014,7 @@ void MsckfVio::removeLostFeatures() {
     }
     if(!feature.is_anchored)
     {
-      if(!feature.initializeAnchorUndistort(cam0, N))
+      if(!feature.initializeAnchor(cam0, N))
       {
         invalid_feature_ids.push_back(feature.id);
         continue;
@ -2049,20 +2061,20 @@ void MsckfVio::removeLostFeatures() {
MatrixXd pH_xj; MatrixXd pH_xj;
VectorXd pr_j; VectorXd pr_j;
PhotometricFeatureJacobian(feature.id, cam_state_ids, pH_xj, pr_j); //PhotometricFeatureJacobian(feature.id, cam_state_ids, pH_xj, pr_j);
featureJacobian(feature.id, cam_state_ids, H_xj, r_j); featureJacobian(feature.id, cam_state_ids, H_xj, r_j);
if (gatingTest(H_xj, r_j, r_j.size())) { //, cam_state_ids.size()-1)) { if (gatingTest(H_xj, r_j, r_j.size())) { //, cam_state_ids.size()-1)) {
H_x.block(stack_cntr, 0, H_xj.rows(), H_xj.cols()) = H_xj; H_x.block(stack_cntr, 0, H_xj.rows(), H_xj.cols()) = H_xj;
r.segment(stack_cntr, r_j.rows()) = r_j; r.segment(stack_cntr, r_j.rows()) = r_j;
stack_cntr += H_xj.rows(); stack_cntr += H_xj.rows();
} }
/*
if (gatingTest(pH_xj, pr_j, pr_j.size())) { //, cam_state_ids.size()-1)) { if (gatingTest(pH_xj, pr_j, pr_j.size())) { //, cam_state_ids.size()-1)) {
pH_x.block(pstack_cntr, 0, pH_xj.rows(), pH_xj.cols()) = pH_xj; pH_x.block(pstack_cntr, 0, pH_xj.rows(), pH_xj.cols()) = pH_xj;
pr.segment(pstack_cntr, pr_j.rows()) = pr_j; pr.segment(pstack_cntr, pr_j.rows()) = pr_j;
pstack_cntr += pH_xj.rows(); pstack_cntr += pH_xj.rows();
} }
*/
// Put an upper bound on the row size of measurement Jacobian, // Put an upper bound on the row size of measurement Jacobian,
// which helps guarantee the executation time. // which helps guarantee the executation time.
//if (stack_cntr > 1500) break; //if (stack_cntr > 1500) break;
@@ -2075,7 +2087,7 @@ void MsckfVio::removeLostFeatures() {
 
   // Perform the measurement update step.
   measurementUpdate(H_x, r);
-  photometricMeasurementUpdate(pH_x, pr);
+  //photometricMeasurementUpdate(pH_x, pr);
 
   // Remove all processed features from the map.
   for (const auto& feature_id : processed_feature_ids)
@@ -2168,7 +2180,7 @@ void MsckfVio::pruneLastCamStateBuffer()
     }
     if(!feature.is_anchored)
     {
-      if(!feature.initializeAnchorUndistort(cam0, N))
+      if(!feature.initializeAnchor(cam0, N))
       {
         feature.observations.erase(rm_cam_state_id);
         continue;
@@ -2205,7 +2217,7 @@ void MsckfVio::pruneLastCamStateBuffer()
     for (const auto& cam_state : state_server.cam_states)
       involved_cam_state_ids.push_back(cam_state.first);
 
-    PhotometricFeatureJacobian(feature.id, involved_cam_state_ids, pH_xj, pr_j);
+    //PhotometricFeatureJacobian(feature.id, involved_cam_state_ids, pH_xj, pr_j);
     featureJacobian(feature.id, involved_cam_state_ids, H_xj, r_j);
 
     if (gatingTest(H_xj, r_j, r_j.size())) {// involved_cam_state_ids.size())) {
@@ -2214,11 +2226,13 @@ void MsckfVio::pruneLastCamStateBuffer()
       stack_cntr += H_xj.rows();
       pruned_cntr++;
     }
+    /*
     if (gatingTest(pH_xj, pr_j, pr_j.size())) {// involved_cam_state_ids.size())) {
       pH_x.block(pstack_cntr, 0, pH_xj.rows(), pH_xj.cols()) = pH_xj;
       pr.segment(pstack_cntr, pr_j.rows()) = pr_j;
       pstack_cntr += pH_xj.rows();
     }
+    */
 
     for (const auto& cam_id : involved_cam_state_ids)
       feature.observations.erase(cam_id);
   }
@@ -2231,7 +2245,7 @@ void MsckfVio::pruneLastCamStateBuffer()
 
   // Perform measurement update.
   measurementUpdate(H_x, r);
-  photometricMeasurementUpdate(pH_x, pr);
+  //photometricMeasurementUpdate(pH_x, pr);
 
   //reduction
   int cam_sequence = std::distance(state_server.cam_states.begin(),
@@ -2320,7 +2334,7 @@ void MsckfVio::pruneCamStateBuffer() {
     }
     if(!feature.is_anchored)
     {
-      if(!feature.initializeAnchorUndistort(cam0, N))
+      if(!feature.initializeAnchor(cam0, N))
      {
         for (const auto& cam_id : involved_cam_state_ids)
           feature.observations.erase(cam_id);
@@ -2358,7 +2372,7 @@ void MsckfVio::pruneCamStateBuffer() {
 
     if (involved_cam_state_ids.size() == 0) continue;
 
-    PhotometricFeatureJacobian(feature.id, involved_cam_state_ids, pH_xj, pr_j);
+    //PhotometricFeatureJacobian(feature.id, involved_cam_state_ids, pH_xj, pr_j);
     featureJacobian(feature.id, involved_cam_state_ids, H_xj, r_j);
 
     if (gatingTest(H_xj, r_j, involved_cam_state_ids.size())) {// involved_cam_state_ids.size())) {
@@ -2366,13 +2380,13 @@ void MsckfVio::pruneCamStateBuffer() {
       r.segment(stack_cntr, r_j.rows()) = r_j;
       stack_cntr += H_xj.rows();
     }
+    /*
     if (gatingTest(pH_xj, pr_j, pr_j.size())) {// involved_cam_state_ids.size())) {
       pH_x.block(pstack_cntr, 0, pH_xj.rows(), pH_xj.cols()) = pH_xj;
       pr.segment(pstack_cntr, pr_j.rows()) = pr_j;
       pstack_cntr += pH_xj.rows();
     }
+    */
 
     for (const auto& cam_id : involved_cam_state_ids)
       feature.observations.erase(cam_id);
   }
@@ -2385,7 +2399,7 @@ void MsckfVio::pruneCamStateBuffer() {
 
   // Perform measurement update.
   measurementUpdate(H_x, r);
-  photometricMeasurementUpdate(pH_x, pr);
+  //photometricMeasurementUpdate(pH_x, pr);
 
   //reduction
   for (const auto& cam_id : rm_cam_state_ids) {