image reprojection visualization in images

Raphael Maenle 2019-04-19 13:11:19 +02:00
parent 1fa2518215
commit 6f16f1b566
4 changed files with 99 additions and 58 deletions


@@ -16,6 +16,15 @@
namespace msckf_vio {
struct Frame{
cv::Mat image;
double exposureTime_ms;
};
typedef std::map<StateIDType, Frame, std::less<StateIDType>,
Eigen::aligned_allocator<
std::pair<StateIDType, Frame> > > movingWindow;
struct IlluminationParameter{
double frame_bias;
double frame_gain;
@@ -28,12 +37,11 @@ struct CameraCalibration{
cv::Vec2i resolution;
cv::Vec4d intrinsics;
cv::Vec4d distortion_coeffs;
movingWindow moving_window;
cv::Mat featureVisu;
};
struct Frame{
cv::Mat image;
double exposureTime_ms;
};
/*
* @brief CAMState Stored camera state in order to
@@ -85,10 +93,6 @@ struct CAMState {
typedef std::map<StateIDType, CAMState, std::less<int>,
Eigen::aligned_allocator<
std::pair<const StateIDType, CAMState> > > CamStateServer;
typedef std::map<StateIDType, Frame, std::less<StateIDType>,
Eigen::aligned_allocator<
std::pair<StateIDType, Frame> > > movingWindow;
} // namespace msckf_vio
#endif // MSCKF_VIO_CAM_STATE_H
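
For reference, a minimal self-contained sketch of the per-camera moving window introduced in this header: Frame and the movingWindow map now live next to CameraCalibration, so each camera carries its own image/exposure history and visualization buffer. This is illustrative only; StateIDType is assumed to be the integer ID type used elsewhere in msckf_vio, and main() is a hypothetical usage example, not part of the commit.

#include <map>
#include <functional>
#include <utility>
#include <opencv2/core.hpp>
#include <Eigen/Core>   // Eigen::aligned_allocator

typedef long long int StateIDType;  // assumption: matches msckf_vio's ID type

struct Frame {
  cv::Mat image;
  double exposureTime_ms;
};

// Aligned map from camera-state ID to frame; the const key in the allocator's
// value_type matches std::map's own value_type.
typedef std::map<StateIDType, Frame, std::less<StateIDType>,
    Eigen::aligned_allocator<std::pair<const StateIDType, Frame> > > movingWindow;

struct CameraCalibration {
  movingWindow moving_window;  // per-camera image history
  cv::Mat featureVisu;         // accumulated reprojection visualization
  // intrinsics, distortion model, and resolution omitted here
};

int main() {
  CameraCalibration cam0;
  StateIDType id = 0;
  cam0.moving_window[id].image = cv::Mat::zeros(480, 640, CV_8UC1);
  cam0.moving_window[id].exposureTime_ms = 5.0;
  return 0;
}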


@@ -126,7 +126,6 @@ struct Feature {
*/
bool initializeAnchor(
const movingWindow& cam0_moving_window,
const CameraCalibration& cam);
@@ -165,15 +164,13 @@ struct Feature {
bool estimate_FrameIrradiance(
const CAMState& cam_state,
const StateIDType& cam_state_id,
const CameraCalibration& cam0,
const movingWindow& cam0_moving_window,
CameraCalibration& cam0,
std::vector<float>& anchorPatch_estimate) const;
bool FrameIrradiance(
const CAMState& cam_state,
const StateIDType& cam_state_id,
const CameraCalibration& cam0,
const movingWindow& cam0_moving_window,
CameraCalibration& cam0,
std::vector<float>& anchorPatch_measurement) const;
/*
@@ -368,8 +365,7 @@ bool Feature::checkMotion(
bool Feature::estimate_FrameIrradiance(
const CAMState& cam_state,
const StateIDType& cam_state_id,
const CameraCalibration& cam0,
const movingWindow& cam0_moving_window,
CameraCalibration& cam0,
std::vector<float>& anchorPatch_estimate) const
{
// get irradiance of patch in anchor frame
@@ -377,11 +373,11 @@ bool Feature::estimate_FrameIrradiance(
// multiply by a and add b of this frame
auto anchor = observations.begin();
if(cam0_moving_window.find(anchor->first) == cam0_moving_window.end())
if(cam0.moving_window.find(anchor->first) == cam0.moving_window.end())
return false;
double anchorExposureTime_ms = cam0_moving_window.find(anchor->first)->second.exposureTime_ms;
double frameExposureTime_ms = cam0_moving_window.find(cam_state_id)->second.exposureTime_ms;
double anchorExposureTime_ms = cam0.moving_window.find(anchor->first)->second.exposureTime_ms;
double frameExposureTime_ms = cam0.moving_window.find(cam_state_id)->second.exposureTime_ms;
double a_A = anchorExposureTime_ms;
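
A hedged sketch of the exposure compensation this function hints at: treating the exposure times as affine gains a_A (anchor frame) and a_l (current frame), the anchor-patch irradiance is rescaled before it is compared against the current frame. The zero biases, the exact gain model, and the function name estimateFrameIrradiance are assumptions for illustration, not the upstream estimator.

#include <vector>

// Illustrative only: rescale anchor-patch irradiance by the exposure ratio.
std::vector<float> estimateFrameIrradiance(
    const std::vector<float>& anchorPatch,  // irradiance sampled in the anchor frame
    double anchorExposureTime_ms,           // a_A
    double frameExposureTime_ms) {          // a_l
  const double a_A = anchorExposureTime_ms;
  const double a_l = frameExposureTime_ms;
  std::vector<float> estimate;
  estimate.reserve(anchorPatch.size());
  for (float irr : anchorPatch)
    estimate.push_back(static_cast<float>(irr * a_l / a_A));  // assumed zero bias
  return estimate;
}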
@@ -401,12 +397,15 @@ bool Feature::estimate_FrameIrradiance(
bool Feature::FrameIrradiance(
const CAMState& cam_state,
const StateIDType& cam_state_id,
const CameraCalibration& cam0,
const movingWindow& cam0_moving_window,
CameraCalibration& cam0,
std::vector<float>& anchorPatch_measurement) const
{
// project every point in anchorPatch_3d.
// int count = 0;
cv::Mat current_image = cam0.moving_window.find(cam_state_id)->second.image;
cv::Mat dottedFrame(current_image.size(), CV_8UC3);
cv::cvtColor(current_image, dottedFrame, CV_GRAY2RGB);
for (auto point : anchorPatch_3d)
{
// testing
@@ -414,8 +413,14 @@ bool Feature::FrameIrradiance(
//if(count == 4)
//printf("\n\ncenter:\n");
cv::Point2f p_in_c0 = projectPositionToCamera(cam_state, cam_state_id, cam0, point);
float irradiance = PixelIrradiance(p_in_c0, cam0_moving_window.find(cam_state_id)->second.image);
cv::Point2f p_in_c0 = projectPositionToCamera(cam_state, cam_state_id, cam0, point);
//visualization of feature
cv::Point xs(p_in_c0.x, p_in_c0.y);
cv::Point ys(p_in_c0.x, p_in_c0.y);
cv::rectangle(dottedFrame, xs, ys, cv::Scalar(0,255,0));
float irradiance = PixelIrradiance(p_in_c0, cam0.moving_window.find(cam_state_id)->second.image);
anchorPatch_measurement.push_back(irradiance);
// testing
@@ -424,6 +429,15 @@ bool Feature::FrameIrradiance(
//printf("dist:\n \tpos: %f, %f\n\ttrue pos: %f, %f\n\n", p_in_c0.x, p_in_c0.y, anchor_center_pos.x, anchor_center_pos.y);
}
//visu
//cv::resize(dottedFrame, dottedFrame, cv::Size(dottedFrame.cols*0.2, dottedFrame.rows*0.2));
if(cam0.featureVisu.empty())
cam0.featureVisu = dottedFrame.clone();
else
cv::hconcat(cam0.featureVisu, dottedFrame, cam0.featureVisu);
}
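
The visualization pattern added above, restated as a minimal standalone sketch (appendDottedFrame is a hypothetical free function, not the member implementation): mark every reprojected patch point in a color copy of the current frame and append the result to a running horizontal strip.

#include <vector>
#include <opencv2/core.hpp>
#include <opencv2/imgproc.hpp>

void appendDottedFrame(const cv::Mat& gray_frame,
                       const std::vector<cv::Point2f>& reprojections,
                       cv::Mat& featureVisu) {
  cv::Mat dottedFrame;
  cv::cvtColor(gray_frame, dottedFrame, cv::COLOR_GRAY2RGB);

  for (const auto& p : reprojections) {
    // A rectangle with identical corners marks a single pixel in green.
    cv::Point px(static_cast<int>(p.x), static_cast<int>(p.y));
    cv::rectangle(dottedFrame, px, px, cv::Scalar(0, 255, 0));
  }

  if (featureVisu.empty())
    featureVisu = dottedFrame.clone();
  else
    cv::hconcat(featureVisu, dottedFrame, featureVisu);  // grow the strip to the right
}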
float Feature::PixelIrradiance(cv::Point2f pose, cv::Mat image) const
@@ -476,10 +490,8 @@ Eigen::Vector3d Feature::projectPixelToPosition(cv::Point2f in_p,
//printf("%f, %f, %f\n",PositionInWorld[0], PositionInWorld[1], PositionInWorld[2]);
}
//@test center projection must always be initial feature projection
bool Feature::initializeAnchor(
const movingWindow& cam0_moving_window,
const CameraCalibration& cam)
{
@@ -489,10 +501,11 @@ bool Feature::initializeAnchor(
int n = (int)(N-1)/2;
auto anchor = observations.begin();
if(cam0_moving_window.find(anchor->first) == cam0_moving_window.end())
if(cam.moving_window.find(anchor->first) == cam.moving_window.end())
return false;
cv::Mat anchorImage = cam0_moving_window.find(anchor->first)->second.image;
cv::Mat anchorImage = cam.moving_window.find(anchor->first)->second.image;
auto u = anchor->second(0);//*cam.intrinsics[0] + cam.intrinsics[2];
auto v = anchor->second(1);//*cam.intrinsics[1] + cam.intrinsics[3];
@@ -513,6 +526,8 @@ bool Feature::initializeAnchor(
for(double v_run = -n; v_run <= n; v_run++)
und_pix_v.push_back(cv::Point2f(und_pix_p.x-u_run, und_pix_p.y-v_run));
//create undistorted pure points
std::vector<cv::Point2f> und_v;
image_handler::undistortPoints(und_pix_v,
@@ -526,8 +541,9 @@ bool Feature::initializeAnchor(
cam.distortion_model,
cam.distortion_coeffs);
anchor_center_pos = vec[4];
// save anchor position for later visualization
anchor_center_pos = vec[4];
for(auto point : vec)
{
if(point.x - n < 0 || point.x + n >= cam.resolution(0) || point.y - n < 0 || point.y + n >= cam.resolution(1))
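
A hedged sketch of the N x N patch grid built around the anchor point in this function (N assumed odd, so n = (N-1)/2 centers the grid); the resulting pixel coordinates are the ones passed to image_handler::undistortPoints and bounds-checked against the camera resolution. buildPatchGrid is a hypothetical helper name.

#include <vector>
#include <opencv2/core.hpp>

std::vector<cv::Point2f> buildPatchGrid(const cv::Point2f& und_pix_p, int N) {
  const int n = (N - 1) / 2;
  std::vector<cv::Point2f> und_pix_v;
  und_pix_v.reserve(N * N);
  for (int u_run = -n; u_run <= n; ++u_run)
    for (int v_run = -n; v_run <= n; ++v_run)
      // Same sign convention as above: offsets are subtracted from the center.
      und_pix_v.push_back(cv::Point2f(und_pix_p.x - u_run, und_pix_p.y - v_run));
  return und_pix_v;
}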


@@ -225,6 +225,9 @@ class MsckfVio {
CameraCalibration cam1;
ros::Time image_save_time;
// Indicate if the gravity vector is set.
bool is_gravity_set;


@@ -220,6 +220,12 @@ bool MsckfVio::loadParameters() {
cout << T_imu_cam0.linear() << endl;
cout << T_imu_cam0.translation().transpose() << endl;
cout << "OpenCV version : " << CV_VERSION << endl;
cout << "Major version : " << CV_MAJOR_VERSION << endl;
cout << "Minor version : " << CV_MINOR_VERSION << endl;
cout << "Subminor version : " << CV_SUBMINOR_VERSION << endl;
ROS_INFO("max camera state #: %d", max_cam_state_size);
ROS_INFO("===========================================");
return true;
@@ -392,37 +398,37 @@ void MsckfVio::imageCallback(
}
void MsckfVio::manageMovingWindow(
const sensor_msgs::ImageConstPtr& cam0_img,
const sensor_msgs::ImageConstPtr& cam1_img,
const CameraMeasurementConstPtr& feature_msg) {
const sensor_msgs::ImageConstPtr& cam0_img,
const sensor_msgs::ImageConstPtr& cam1_img,
const CameraMeasurementConstPtr& feature_msg) {
//save exposure Time into moving window
cam0_moving_window[state_server.imu_state.id].exposureTime_ms = strtod(cam0_img->header.frame_id.data(), NULL) / 1000000;
cam1_moving_window[state_server.imu_state.id].exposureTime_ms = strtod(cam1_img->header.frame_id.data(), NULL) / 1000000;
if(cam0_moving_window[state_server.imu_state.id].exposureTime_ms < 1)
cam0_moving_window[state_server.imu_state.id].exposureTime_ms = 1;
if(cam1_moving_window[state_server.imu_state.id].exposureTime_ms < 1)
cam1_moving_window[state_server.imu_state.id].exposureTime_ms = 1;
if(cam0_moving_window[state_server.imu_state.id].exposureTime_ms > 100)
cam0_moving_window[state_server.imu_state.id].exposureTime_ms = 100;
if(cam1_moving_window[state_server.imu_state.id].exposureTime_ms > 100)
cam1_moving_window[state_server.imu_state.id].exposureTime_ms = 100;
//save exposure Time into moving window
cam0.moving_window[state_server.imu_state.id].exposureTime_ms = strtod(cam0_img->header.frame_id.data(), NULL) / 1000000;
cam1.moving_window[state_server.imu_state.id].exposureTime_ms = strtod(cam1_img->header.frame_id.data(), NULL) / 1000000;
if(cam0.moving_window[state_server.imu_state.id].exposureTime_ms < 1)
cam0.moving_window[state_server.imu_state.id].exposureTime_ms = 1;
if(cam1.moving_window[state_server.imu_state.id].exposureTime_ms < 1)
cam1.moving_window[state_server.imu_state.id].exposureTime_ms = 1;
if(cam0.moving_window[state_server.imu_state.id].exposureTime_ms > 100)
cam0.moving_window[state_server.imu_state.id].exposureTime_ms = 100;
if(cam1.moving_window[state_server.imu_state.id].exposureTime_ms > 100)
cam1.moving_window[state_server.imu_state.id].exposureTime_ms = 100;
// Get the current image.
cv_bridge::CvImageConstPtr cam0_img_ptr = cv_bridge::toCvShare(cam0_img,
sensor_msgs::image_encodings::MONO8);
cv_bridge::CvImageConstPtr cam1_img_ptr = cv_bridge::toCvShare(cam1_img,
sensor_msgs::image_encodings::MONO8);
// Get the current image.
cv_bridge::CvImageConstPtr cam0_img_ptr = cv_bridge::toCvShare(cam0_img,
sensor_msgs::image_encodings::MONO8);
cv_bridge::CvImageConstPtr cam1_img_ptr = cv_bridge::toCvShare(cam1_img,
sensor_msgs::image_encodings::MONO8);
// save image information into moving window
cam0_moving_window[state_server.imu_state.id].image = cam0_img_ptr->image.clone();
cam1_moving_window[state_server.imu_state.id].image = cam1_img_ptr->image.clone();
// save image information into moving window
cam0.moving_window[state_server.imu_state.id].image = cam0_img_ptr->image.clone();
cam1.moving_window[state_server.imu_state.id].image = cam1_img_ptr->image.clone();
//TODO handle any massive overflow correctly (should be pruned before this ever triggers)
while(cam0_moving_window.size() > 100)
while(cam0.moving_window.size() > 100)
{
cam1_moving_window.erase(cam1_moving_window.begin());
cam0_moving_window.erase(cam0_moving_window.begin());
cam1.moving_window.erase(cam1.moving_window.begin());
cam0.moving_window.erase(cam0.moving_window.begin());
}
}
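
A compact sketch of the exposure handling above: the exposure time is assumed to be encoded in the image header's frame_id in nanoseconds, converted to milliseconds, and clamped to the same [1, 100] ms range before being stored in the per-camera moving window. parseExposureTimeMs is a hypothetical helper, not part of the commit.

#include <algorithm>
#include <cstdlib>
#include <string>

double parseExposureTimeMs(const std::string& frame_id) {
  // frame_id is assumed to carry the exposure time in nanoseconds.
  double exposure_ms = std::strtod(frame_id.c_str(), nullptr) / 1000000.0;
  // Clamp to a sane [1, 100] ms range, as done above.
  return std::min(std::max(exposure_ms, 1.0), 100.0);
}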
@@ -916,7 +922,7 @@ void MsckfVio::PhotometricMeasurementJacobian(
//photometric observation
std::vector<float> photo_z;
feature.FrameIrradiance(cam_state, cam_state_id, cam0, cam0_moving_window, photo_z);
feature.FrameIrradiance(cam_state, cam_state_id, cam0, photo_z);
// Convert the feature position from the world frame to
// the cam0 and cam1 frame.
@@ -975,7 +981,7 @@ void MsckfVio::PhotometricMeasurementJacobian(
//estimate photometric measurement
std::vector<float> estimate_photo_z;
feature.estimate_FrameIrradiance(cam_state, cam_state_id, cam0, cam0_moving_window, estimate_photo_z);
feature.estimate_FrameIrradiance(cam_state, cam_state_id, cam0, estimate_photo_z);
std::vector<float> photo_r;
//calculate photom. residual
@@ -1017,6 +1023,8 @@ void MsckfVio::PhotometricFeatureJacobian(
int stack_cntr = 0;
printf("_____FEATURE:_____\n");
cam0.featureVisu.release();
for (const auto& cam_id : valid_cam_state_ids) {
Matrix<double, 4, 6> H_xi = Matrix<double, 4, 6>::Zero();
@@ -1034,7 +1042,17 @@ void MsckfVio::PhotometricFeatureJacobian(
r_j.segment<4>(stack_cntr) = r_i;
stack_cntr += 4;
}
if(!cam0.featureVisu.empty() && cam0.featureVisu.size().width > 3000)
imshow("feature", cam0.featureVisu);
cvWaitKey(1);
if((ros::Time::now() - image_save_time).toSec() > 1)
{
std::stringstream ss;
ss << "/home/raphael/dev/MSCKF_ws/img/feature_" << std::to_string(ros::Time::now().toSec()) << ".jpg";
imwrite(ss.str(), cam0.featureVisu);
image_save_time = ros::Time::now();
}
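
The debug output added here, restated as a hedged reusable helper: show the concatenated strip once it is wide enough and write it to disk at most once per second. The output directory and the ros::Time-based rate limit mirror the code above; the function name showAndSaveVisu is hypothetical.

#include <sstream>
#include <string>
#include <opencv2/core.hpp>
#include <opencv2/highgui.hpp>
#include <opencv2/imgcodecs.hpp>
#include <ros/ros.h>

void showAndSaveVisu(const cv::Mat& featureVisu, ros::Time& last_save_time,
                     const std::string& out_dir) {
  if (featureVisu.empty())
    return;
  if (featureVisu.size().width > 3000) {
    cv::imshow("feature", featureVisu);
    cv::waitKey(1);
  }
  if ((ros::Time::now() - last_save_time).toSec() > 1.0) {
    std::stringstream ss;
    ss << out_dir << "/feature_" << std::to_string(ros::Time::now().toSec()) << ".jpg";
    cv::imwrite(ss.str(), featureVisu);
    last_save_time = ros::Time::now();
  }
}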
// Project the residual and Jacobians onto the nullspace
// of H_fj.
JacobiSVD<MatrixXd> svd_helper(H_fj, ComputeFullU | ComputeThinV);
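
For context, a hedged sketch of the nullspace projection this hunk leads into: the left nullspace of the feature Jacobian H_fj is taken from a full-U SVD and used to eliminate the 3-D feature position from the stacked linearized measurements, the standard MSCKF step. The function name and the assumption that H_fj has rank 3 are illustrative, not taken from the commit.

#include <Eigen/Dense>

void projectOntoNullspace(const Eigen::MatrixXd& H_fj,
                          Eigen::MatrixXd& H_xj,
                          Eigen::VectorXd& r_j) {
  Eigen::JacobiSVD<Eigen::MatrixXd> svd_helper(
      H_fj, Eigen::ComputeFullU | Eigen::ComputeThinV);
  // The last rows(H_fj) - 3 left singular vectors span the left nullspace
  // of the (rank-3) feature Jacobian.
  Eigen::MatrixXd A = svd_helper.matrixU().rightCols(H_fj.rows() - 3);
  H_xj = A.transpose() * H_xj;
  r_j  = A.transpose() * r_j;
}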
@@ -1336,7 +1354,7 @@ void MsckfVio::removeLostFeatures() {
}
if(!feature.is_anchored)
{
if(!feature.initializeAnchor(cam0_moving_window, cam0))
if(!feature.initializeAnchor(cam0))
{
invalid_feature_ids.push_back(feature.id);
continue;
@@ -1491,7 +1509,7 @@ void MsckfVio::pruneCamStateBuffer() {
}
if(!feature.is_anchored)
{
if(!feature.initializeAnchor(cam0_moving_window, cam0))
if(!feature.initializeAnchor(cam0))
{
for (const auto& cam_id : involved_cam_state_ids)
feature.observations.erase(cam_id);
@@ -1573,8 +1591,8 @@ void MsckfVio::pruneCamStateBuffer() {
// Remove this camera state in the state vector.
state_server.cam_states.erase(cam_id);
cam0_moving_window.erase(cam_id);
cam1_moving_window.erase(cam_id);
cam0.moving_window.erase(cam_id);
cam1.moving_window.erase(cam_id);
}
return;