diff --git a/include/msckf_vio/feature.hpp b/include/msckf_vio/feature.hpp
index 5026c3d..3793d49 100644
--- a/include/msckf_vio/feature.hpp
+++ b/include/msckf_vio/feature.hpp
@@ -157,12 +157,19 @@ struct Feature {
    *
    */
-  bool IrradianceOfAnchorPatch(
+  bool estimate_FrameIrradiance(
     const CAMState& cam_state,
     const StateIDType& cam_state_id,
-    const CameraCalibration& cam,
+    const CameraCalibration& cam0,
     const movingWindow& cam0_moving_window,
-    std::vector<uint8_t>& anchorPatch_measurement) const;
+    std::vector<float>& anchorPatch_estimate) const;
+
+  bool FrameIrradiance(
+    const CAMState& cam_state,
+    const StateIDType& cam_state_id,
+    const CameraCalibration& cam0,
+    const movingWindow& cam0_moving_window,
+    std::vector<float>& anchorPatch_measurement) const;
 
   /*
    * @brief projectPixelToPosition uses the calcualted pixels
@@ -175,7 +182,7 @@ inline Eigen::Vector3d projectPixelToPosition(cv::Point2f in_p,
    * @brief Irradiance returns irradiance value of a pixel
    */
-  inline uint8_t Irradiance(cv::Point2f pose, cv::Mat image) const;
+  inline float PixelIrradiance(cv::Point2f pose, cv::Mat image) const;
 
   // An unique identifier for the feature.
   // In case of long time running, the variable
@@ -350,25 +357,56 @@ bool Feature::checkMotion(
   else return false;
 }
 
-bool Feature::IrradianceOfAnchorPatch(
+bool Feature::estimate_FrameIrradiance(
   const CAMState& cam_state,
   const StateIDType& cam_state_id,
-  const CameraCalibration& cam,
+  const CameraCalibration& cam0,
   const movingWindow& cam0_moving_window,
-  std::vector<uint8_t>& anchorPatch_measurement) const
+  std::vector<float>& anchorPatch_estimate) const
+{
+  // get irradiance of patch in anchor frame
+  // subtract estimated b and divide by a of anchor frame
+  // multiply by a and add b of this frame
+
+  auto anchor = observations.begin();
+  if(cam0_moving_window.find(anchor->first) == cam0_moving_window.end())
+    return false;
+
+  double anchorExposureTime_ms = cam0_moving_window.find(anchor->first)->second.exposureTime_ms;
+  double frameExposureTime_ms = cam0_moving_window.find(cam_state_id)->second.exposureTime_ms;
+
+
+  double a_A = anchorExposureTime_ms;
+  double b_A = 0;
+  double a_l = frameExposureTime_ms;
+  double b_l = 0;
+  for (double anchorPixel : anchorPatch)
+  {
+    float irradiance = ((anchorPixel - b_A) / a_A ) * a_l - b_l;
+    anchorPatch_estimate.push_back(irradiance);
+  }
+
+}
+
+bool Feature::FrameIrradiance(
+  const CAMState& cam_state,
+  const StateIDType& cam_state_id,
+  const CameraCalibration& cam0,
+  const movingWindow& cam0_moving_window,
+  std::vector<float>& anchorPatch_measurement) const
 {
   //project every point in anchorPatch_3d.
   for (auto point : anchorPatch_3d)
   {
-    cv::Point2f p_in_c0 = projectPositionToCamera(cam_state, cam_state_id, cam, point);
-    uint8_t irradiance = Irradiance(p_in_c0 , cam0_moving_window.find(cam_state_id)->second.image);
+    cv::Point2f p_in_c0 = projectPositionToCamera(cam_state, cam_state_id, cam0, point);
+    float irradiance = PixelIrradiance(p_in_c0, cam0_moving_window.find(cam_state_id)->second.image);
     anchorPatch_measurement.push_back(irradiance);
   }
 }
 
-uint8_t Feature::Irradiance(cv::Point2f pose, cv::Mat image) const
+float Feature::PixelIrradiance(cv::Point2f pose, cv::Mat image) const
 {
-  return image.at<uint8_t>(pose.x, pose.y);
+  return (float)image.at<uint8_t>(pose.x, pose.y);
 }
 
 cv::Point2f Feature::projectPositionToCamera(
@@ -389,7 +427,10 @@ cv::Point2f Feature::projectPositionToCamera(
   out_p = cv::Point2f(p_c0(0)/p_c0(2), p_c0(1)/p_c0(2));
   std::vector<cv::Point2f> out_v;
   out_v.push_back(out_p);
-  std::vector<cv::Point2f> my_p = image_handler::distortPoints(out_v, cam.intrinsics, cam.distortion_model, cam.distortion_coeffs);
+  std::vector<cv::Point2f> my_p = image_handler::distortPoints(out_v,
+                                                               cam.intrinsics,
+                                                               cam.distortion_model,
+                                                               cam.distortion_coeffs);
 
   // printf("truPosition: %f, %f, %f\n", position.x(), position.y(), position.z());
   // printf("camPosition: %f, %f, %f\n", p_c0(0), p_c0(1), p_c0(2));
@@ -418,6 +459,8 @@ bool Feature::initializeAnchor(
   const CameraCalibration& cam)
 {
+  //initialize patch Size
+  //TODO make N size a ros parameter
   int N = 3;
   int n = (int)(N-1)/2;
 
@@ -428,25 +471,32 @@ bool Feature::initializeAnchor(
   cv::Mat anchorImage = cam0_moving_window.find(anchor->first)->second.image;
   auto u = anchor->second(0)*cam.intrinsics[0] + cam.intrinsics[2];
   auto v = anchor->second(1)*cam.intrinsics[1] + cam.intrinsics[3];
-  int count = 0;
-  //go through surrounding pixels
+  //for NxN patch pixels around feature
   for(double u_run = u - n; u_run <= u + n; u_run = u_run + 1)
   {
     for(double v_run = v - n; v_run <= v + n; v_run = v_run + 1)
     {
-      anchorPatch.push_back(anchorImage.at<uint8_t>((int)u_run,(int)v_run));
-      cv::Point2f currentPoint((u_run-cam.intrinsics[2])/cam.intrinsics[0], (v_run-cam.intrinsics[3])/cam.intrinsics[1]);
+      // add irradiance information
+      anchorPatch.push_back((double)anchorImage.at<uint8_t>((int)u_run,(int)v_run));
+
+      // project patch pixel to 3D space
+      auto intr = cam.intrinsics;
+      cv::Point2f currentPoint((u_run-intr[2])/intr[0], (v_run-intr[3])/intr[1]);
       Eigen::Vector3d Npose = projectPixelToPosition(currentPoint, cam);
+
+      //save position
       anchorPatch_3d.push_back(Npose);
     }
   }
+  //TODO test if NxN patch can be selected
   return true;
 }
 
 bool Feature::initializePosition(
   const CamStateServer& cam_states) {
+  // Organize camera poses and feature observations properly.
   std::vector<Eigen::Isometry3d,
     Eigen::aligned_allocator<Eigen::Isometry3d> > cam_poses(0);
diff --git a/src/msckf_vio.cpp b/src/msckf_vio.cpp
index 3625541..2a1f37a 100644
--- a/src/msckf_vio.cpp
+++ b/src/msckf_vio.cpp
@@ -344,7 +344,7 @@ void MsckfVio::imageCallback(
 
   // Add new images to moving window
   start_time = ros::Time::now();
-  manageMovingWindow(cam0_img, cam1_img, feature_msg);
+  //manageMovingWindow(cam0_img, cam1_img, feature_msg);
   double manage_moving_window_time = (
       ros::Time::now()-start_time).toSec();
 
@@ -373,20 +373,20 @@ void MsckfVio::imageCallback(
       processing_end_time - processing_start_time;
   if (processing_time > 1.0/frame_rate) {
     ++critical_time_cntr;
-    //ROS_INFO("\033[1;31mTotal processing time %f/%d...\033[0m",
-    //    processing_time, critical_time_cntr);
-    //printf("IMU processing time: %f/%f\n",
-    //    imu_processing_time, imu_processing_time/processing_time);
-    //printf("State augmentation time: %f/%f\n",
-    //    state_augmentation_time, state_augmentation_time/processing_time);
-    //printf("Add observations time: %f/%f\n",
-    //    add_observations_time, add_observations_time/processing_time);
-    //printf("Remove lost features time: %f/%f\n",
-    //    remove_lost_features_time, remove_lost_features_time/processing_time);
-    //printf("Remove camera states time: %f/%f\n",
-    //    prune_cam_states_time, prune_cam_states_time/processing_time);
-    //printf("Publish time: %f/%f\n",
-    //    publish_time, publish_time/processing_time);
+    ROS_INFO("\033[1;31mTotal processing time %f/%d...\033[0m",
+        processing_time, critical_time_cntr);
+    printf("IMU processing time: %f/%f\n",
+        imu_processing_time, imu_processing_time/processing_time);
+    printf("State augmentation time: %f/%f\n",
+        state_augmentation_time, state_augmentation_time/processing_time);
+    printf("Add observations time: %f/%f\n",
+        add_observations_time, add_observations_time/processing_time);
+    printf("Remove lost features time: %f/%f\n",
+        remove_lost_features_time, remove_lost_features_time/processing_time);
+    printf("Remove camera states time: %f/%f\n",
+        prune_cam_states_time, prune_cam_states_time/processing_time);
+    printf("Publish time: %f/%f\n",
+        publish_time, publish_time/processing_time);
   }
 
   return;
@@ -400,7 +400,15 @@ void MsckfVio::manageMovingWindow(
 
   //save exposure Time into moving window
   cam0_moving_window[state_server.imu_state.id].exposureTime_ms = strtod(cam0_img->header.frame_id.data(), NULL) / 1000;
   cam1_moving_window[state_server.imu_state.id].exposureTime_ms = strtod(cam1_img->header.frame_id.data(), NULL) / 1000;
-  printf("exposure: %f\n", cam0_moving_window[state_server.imu_state.id].exposureTime_ms);
+  if(cam0_moving_window[state_server.imu_state.id].exposureTime_ms < 1)
+    cam0_moving_window[state_server.imu_state.id].exposureTime_ms = 1;
+  if(cam1_moving_window[state_server.imu_state.id].exposureTime_ms < 1)
+    cam1_moving_window[state_server.imu_state.id].exposureTime_ms = 1;
+  if(cam0_moving_window[state_server.imu_state.id].exposureTime_ms > 500)
+    cam0_moving_window[state_server.imu_state.id].exposureTime_ms = 500;
+  if(cam1_moving_window[state_server.imu_state.id].exposureTime_ms > 500)
+    cam1_moving_window[state_server.imu_state.id].exposureTime_ms = 500;
+
   // Get the current image.
   cv_bridge::CvImageConstPtr cam0_img_ptr = cv_bridge::toCvShare(cam0_img,
       sensor_msgs::image_encodings::MONO8);
@@ -908,8 +916,8 @@ void MsckfVio::PhotometricMeasurementJacobian(
   const Vector4d& z = feature.observations.find(cam_state_id)->second;
 
   //photometric observation
-  std::vector<uint8_t> photo_z;
-  feature.IrradianceOfAnchorPatch(cam_state, cam_state_id, cam0, cam0_moving_window, photo_z);
+  std::vector<float> photo_z;
+  //feature.FrameIrradiance(cam_state, cam_state_id, cam0, cam0_moving_window, photo_z);
 
   // Convert the feature position from the world frame to
   // the cam0 and cam1 frame.
@@ -973,6 +981,20 @@ void MsckfVio::PhotometricMeasurementJacobian(
   r = z - Vector4d(p_c0(0)/p_c0(2), p_c0(1)/p_c0(2),
       p_c1(0)/p_c1(2), p_c1(1)/p_c1(2));
 
+  //estimate photometric measurement
+  std::vector<float> estimate_photo_z;
+  //feature.estimate_FrameIrradiance(cam_state, cam_state_id, cam0, cam0_moving_window, estimate_photo_z);
+  std::vector<float> photo_r;
+
+  //calculate photom. residual
+  //for(int i = 0; i < photo_z.size(); i++)
+  //  photo_r.push_back(photo_z[i] - estimate_photo_z[i]);
+
+  // printf("-----\n");
+  // for(int i = 0; i < photo_z.size(); i++)
+  //   printf("%.4f - %.4f\n", photo_z[i], estimate_photo_z[i]);
+
+  photo_z.clear();
   return;
 }
 
@@ -1002,6 +1024,7 @@ void MsckfVio::PhotometricFeatureJacobian(
   VectorXd r_j = VectorXd::Zero(jacobian_row_size);
   int stack_cntr = 0;
 
+
   for (const auto& cam_id : valid_cam_state_ids) {
 
     Matrix<double, 4, 6> H_xi = Matrix<double, 4, 6>::Zero();
@@ -1318,12 +1341,11 @@ void MsckfVio::removeLostFeatures() {
           continue;
         }
       }
-    }
-
-    if(!feature.initializeAnchor(cam0_moving_window, cam0))
-    {
-      invalid_feature_ids.push_back(feature.id);
-      continue;
+      /*if(!feature.initializeAnchor(cam0_moving_window, cam0))
+      {
+        invalid_feature_ids.push_back(feature.id);
+        continue;
+      }*/
     }
 
     jacobian_row_size += 4*feature.observations.size() - 3;
@@ -1472,13 +1494,12 @@ void MsckfVio::pruneCamStateBuffer() {
           continue;
        }
      }
-    }
-
-    if(!feature.initializeAnchor(cam0_moving_window, cam0))
-    {
-      for (const auto& cam_id : involved_cam_state_ids)
-        feature.observations.erase(cam_id);
-      continue;
+      /*if(!feature.initializeAnchor(cam0_moving_window, cam0))
+      {
+        for (const auto& cam_id : involved_cam_state_ids)
+          feature.observations.erase(cam_id);
+        continue;
+      }*/
     }
 
     jacobian_row_size += 4*involved_cam_state_ids.size() - 3;
@@ -1491,7 +1512,7 @@ void MsckfVio::pruneCamStateBuffer() {
       21+6*state_server.cam_states.size());
   VectorXd r = VectorXd::Zero(jacobian_row_size);
   int stack_cntr = 0;
-
+  ros::Time start_time = ros::Time::now();
   for (auto& item : map_server) {
     auto& feature = item.second;
     // Check how many camera states to be removed are associated
@@ -1507,8 +1528,8 @@ void MsckfVio::pruneCamStateBuffer() {
 
     MatrixXd H_xj;
     VectorXd r_j;
+    PhotometricFeatureJacobian(feature.id, involved_cam_state_ids, H_xj, r_j);
-
    if (gatingTest(H_xj, r_j, involved_cam_state_ids.size())) {
      H_x.block(stack_cntr, 0, H_xj.rows(), H_xj.cols()) = H_xj;
      r.segment(stack_cntr, r_j.rows()) = r_j;
@@ -1518,13 +1539,15 @@ void MsckfVio::pruneCamStateBuffer() {
     for (const auto& cam_id : involved_cam_state_ids)
       feature.observations.erase(cam_id);
   }
+  double anchorPrune_processing_time = (ros::Time::now()-start_time).toSec();
+  printf("FeatureJacobian Time: %f\n", anchorPrune_processing_time);
 
   H_x.conservativeResize(stack_cntr, H_x.cols());
   r.conservativeResize(stack_cntr);
 
   // Perform measurement update.
   measurementUpdate(H_x, r);
-
+
   for (const auto& cam_id : rm_cam_state_ids) {
     int cam_sequence = std::distance(state_server.cam_states.begin(),
         state_server.cam_states.find(cam_id));
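
Review note, not part of the patch: a minimal, self-contained sketch of the affine irradiance transfer that the comments in estimate_FrameIrradiance describe (undo the anchor frame's affine response a_A, b_A, apply the current frame's a_l, b_l), followed by the photometric residual that the commented-out loop in PhotometricMeasurementJacobian would compute. The patch values and exposure numbers below are made up for illustration; only the variable names mirror the diff.

// affine_irradiance_sketch.cpp -- illustrates the intended photometric residual
#include <cstdio>
#include <vector>

int main() {
  // Anchor-patch intensities, e.g. a 3x3 patch read from the anchor image.
  std::vector<double> anchorPatch = {120, 122, 119, 121, 125, 118, 117, 123, 120};

  // Affine response of anchor frame and current frame; the diff approximates
  // a by the exposure time (clamped to [1, 500] ms in manageMovingWindow) and
  // fixes b to 0.
  double a_A = 10.0, b_A = 0.0;   // anchor frame
  double a_l = 14.0, b_l = 0.0;   // current frame

  // Predicted irradiance in the current frame: remove the anchor response,
  // then apply the current frame's response. Following the comment, b_l is
  // added here; the diff's code subtracts it, which is identical while b_l = 0.
  std::vector<float> estimate_photo_z;
  for (double anchorPixel : anchorPatch)
    estimate_photo_z.push_back(((anchorPixel - b_A) / a_A) * a_l + b_l);

  // The measured irradiance would come from projecting the patch into the
  // current image (FrameIrradiance); faked here to show the residual step.
  std::vector<float> photo_z(estimate_photo_z);
  photo_z[4] += 3.0f;  // pretend one pixel reads brighter than predicted

  // Photometric residual, as in the commented-out loop of
  // PhotometricMeasurementJacobian.
  std::vector<float> photo_r;
  for (size_t i = 0; i < photo_z.size(); ++i)
    photo_r.push_back(photo_z[i] - estimate_photo_z[i]);

  for (float r : photo_r) std::printf("%.4f\n", r);
  return 0;
}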