image reprojection visualization in images

This commit is contained in:
2019-04-19 13:11:19 +02:00
parent 1fa2518215
commit 6f16f1b566
4 changed files with 99 additions and 58 deletions

View File

@ -126,7 +126,6 @@ struct Feature {
*/
bool initializeAnchor(
const movingWindow& cam0_moving_window,
const CameraCalibration& cam);
@ -165,15 +164,13 @@ struct Feature {
bool estimate_FrameIrradiance(
const CAMState& cam_state,
const StateIDType& cam_state_id,
const CameraCalibration& cam0,
const movingWindow& cam0_moving_window,
CameraCalibration& cam0,
std::vector<float>& anchorPatch_estimate) const;
bool FrameIrradiance(
const CAMState& cam_state,
const StateIDType& cam_state_id,
const CameraCalibration& cam0,
const movingWindow& cam0_moving_window,
CameraCalibration& cam0,
std::vector<float>& anchorPatch_measurement) const;
/*
@ -368,8 +365,7 @@ bool Feature::checkMotion(
bool Feature::estimate_FrameIrradiance(
const CAMState& cam_state,
const StateIDType& cam_state_id,
const CameraCalibration& cam0,
const movingWindow& cam0_moving_window,
CameraCalibration& cam0,
std::vector<float>& anchorPatch_estimate) const
{
// get irradiance of patch in anchor frame
@ -377,11 +373,11 @@ bool Feature::estimate_FrameIrradiance(
// multiply by a and add b of this frame
auto anchor = observations.begin();
if(cam0_moving_window.find(anchor->first) == cam0_moving_window.end())
if(cam0.moving_window.find(anchor->first) == cam0.moving_window.end())
return false;
double anchorExposureTime_ms = cam0_moving_window.find(anchor->first)->second.exposureTime_ms;
double frameExposureTime_ms = cam0_moving_window.find(cam_state_id)->second.exposureTime_ms;
double anchorExposureTime_ms = cam0.moving_window.find(anchor->first)->second.exposureTime_ms;
double frameExposureTime_ms = cam0.moving_window.find(cam_state_id)->second.exposureTime_ms;
double a_A = anchorExposureTime_ms;
@ -401,12 +397,15 @@ bool Feature::estimate_FrameIrradiance(
bool Feature::FrameIrradiance(
const CAMState& cam_state,
const StateIDType& cam_state_id,
const CameraCalibration& cam0,
const movingWindow& cam0_moving_window,
CameraCalibration& cam0,
std::vector<float>& anchorPatch_measurement) const
{
// project every point in anchorPatch_3d.
// int count = 0;
cv::Mat current_image = cam0.moving_window.find(cam_state_id)->second.image;
cv::Mat dottedFrame(current_image.size(), CV_8UC3);
cv::cvtColor(current_image, dottedFrame, CV_GRAY2RGB);
for (auto point : anchorPatch_3d)
{
// testing
@ -414,8 +413,14 @@ bool Feature::FrameIrradiance(
//if(count == 4)
//printf("\n\ncenter:\n");
cv::Point2f p_in_c0 = projectPositionToCamera(cam_state, cam_state_id, cam0, point);
float irradiance = PixelIrradiance(p_in_c0, cam0_moving_window.find(cam_state_id)->second.image);
cv::Point2f p_in_c0 = projectPositionToCamera(cam_state, cam_state_id, cam0, point);
//visualization of feature
cv::Point xs(p_in_c0.x, p_in_c0.y);
cv::Point ys(p_in_c0.x, p_in_c0.y);
cv::rectangle(dottedFrame, xs, ys, cv::Scalar(0,255,0));
float irradiance = PixelIrradiance(p_in_c0, cam0.moving_window.find(cam_state_id)->second.image);
anchorPatch_measurement.push_back(irradiance);
// testing
@ -424,6 +429,15 @@ bool Feature::FrameIrradiance(
//printf("dist:\n \tpos: %f, %f\n\ttrue pos: %f, %f\n\n", p_in_c0.x, p_in_c0.y, anchor_center_pos.x, anchor_center_pos.y);
}
//visu
//cv::resize(dottedFrame, dottedFrame, cv::Size(dottedFrame.cols*0.2, dottedFrame.rows*0.2));
if(cam0.featureVisu.empty())
cam0.featureVisu = dottedFrame.clone();
else
cv::hconcat(cam0.featureVisu, dottedFrame, cam0.featureVisu);
}
float Feature::PixelIrradiance(cv::Point2f pose, cv::Mat image) const
@ -476,10 +490,8 @@ Eigen::Vector3d Feature::projectPixelToPosition(cv::Point2f in_p,
//printf("%f, %f, %f\n",PositionInWorld[0], PositionInWorld[1], PositionInWorld[2]);
}
//@test center projection must always be initial feature projection
bool Feature::initializeAnchor(
const movingWindow& cam0_moving_window,
const CameraCalibration& cam)
{
@ -489,10 +501,11 @@ bool Feature::initializeAnchor(
int n = (int)(N-1)/2;
auto anchor = observations.begin();
if(cam0_moving_window.find(anchor->first) == cam0_moving_window.end())
if(cam.moving_window.find(anchor->first) == cam.moving_window.end())
return false;
cv::Mat anchorImage = cam0_moving_window.find(anchor->first)->second.image;
cv::Mat anchorImage = cam.moving_window.find(anchor->first)->second.image;
auto u = anchor->second(0);//*cam.intrinsics[0] + cam.intrinsics[2];
auto v = anchor->second(1);//*cam.intrinsics[1] + cam.intrinsics[3];
@ -513,6 +526,8 @@ bool Feature::initializeAnchor(
for(double v_run = -n; v_run <= n; v_run++)
und_pix_v.push_back(cv::Point2f(und_pix_p.x-u_run, und_pix_p.y-v_run));
//create undistorted pure points
std::vector<cv::Point2f> und_v;
image_handler::undistortPoints(und_pix_v,
@ -526,8 +541,9 @@ bool Feature::initializeAnchor(
cam.distortion_model,
cam.distortion_coeffs);
anchor_center_pos = vec[4];
// save anchor position for later visualisation
anchor_center_pos = vec[4];
for(auto point : vec)
{
if(point.x - n < 0 || point.x + n >= cam.resolution(0) || point.y - n < 0 || point.y + n >= cam.resolution(1))